mdbq 2.9.5__py3-none-any.whl → 2.9.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1150,51 +1150,39 @@ def upload_dir(path, db_name, collection_name, json_path=None):
                 df = pd.read_csv(os.path.join(root, name), encoding='utf-8_sig', header=0, na_filter=False)
             if name.endswith('.xlsx'):
                 df = pd.read_excel(os.path.join(root, name), sheet_name=0, header=0, engine='openpyxl')
-            try:
-                if len(df) == 0:
-                    continue
-                # if '新版' not in name:
-                #     continue
-                cv = converter.DataFrameConverter()
-                df = cv.convert_df_cols(df=df)  # clean the column names and strip illegal characters from the df
-
-                try:
-                    df = df.astype(dtypes)  # update the df dtypes from the local file; may raise if the fields differ
-                except Exception as e:
-                    print(name, e)
-                    # on exception, intersect the df columns with the keys from the json
-                    old_dt = df.dtypes.apply(str).to_dict()  # dataframe dtypes as a dict
-                    intersection_keys = dtypes.keys() & old_dt.keys()  # keys shared by both dicts
-                    dtypes = {k: dtypes[k] for k in intersection_keys}  # rebuild the dict from the shared keys
-                    df = df.astype(dtypes)  # update the df dtypes again
-                df.fillna(0, inplace=True)
-
-                # for col in df.columns.tolist():
-                #     df[col] = df[col].apply(lambda x: 0 if str(x) == '' else x)
-                # print(f'{i}/{count}')
-                # sql_engine = create_engine(
-                #     f"mysql+pymysql://{username}:{password}@{host}:{port}/{db_name}")  # create the database engine
-                # df.to_sql(
-                #     name=collection_name,
-                #     con=sql_engine,
-                #     if_exists='append',
-                #     index=False,
-                #     chunksize=1000
-                # )
+            # try:
+            if len(df) == 0:
+                continue
+            # if '新版' not in name:
+            #     continue
+            # cv = converter.DataFrameConverter()
+            # df = cv.convert_df_cols(df=df)  # clean the column names and strip illegal characters from the df
 
+            # try:
+            #     df = df.astype(dtypes)  # update the df dtypes from the local file; may raise if the fields differ
+            # except Exception as e:
+            #     print(name, e)
+            #     # on exception, intersect the df columns with the keys from the json
+            #     old_dt = df.dtypes.apply(str).to_dict()  # dataframe dtypes as a dict
+            #     intersection_keys = dtypes.keys() & old_dt.keys()  # keys shared by both dicts
+            #     dtypes = {k: dtypes[k] for k in intersection_keys}  # rebuild the dict from the shared keys
+            #     df = df.astype(dtypes)  # update the df dtypes again
+            df.fillna(0, inplace=True)
+            for col in df.columns.tolist():
+                df[col] = df[col].apply(lambda x: 0 if str(x) == '' else x)
 
+            # if '更新时间' not in df.columns.tolist():
+            #     df['更新时间'] = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
+            #
+            m.df_to_mysql(df=df, db_name=db_name, table_name=collection_name,
+                          move_insert=False,  # delete first, then insert
+                          df_sql=True,
+                          drop_duplicates=False,
+                          count=f'{i}/{count}',
+                          filename=name,
+                          )
+            # nas.df_to_mysql(df=df, db_name=db_name, table_name=collection_name, drop_duplicates=True,)
 
-                #
-                m.df_to_mysql(df=df, db_name=db_name, table_name=collection_name,
-                              move_insert=False,  # delete first, then insert
-                              df_sql=True,
-                              drop_duplicates=False,
-                              count=f'{i}/{count}',
-                              filename=name,
-                              )
-                # nas.df_to_mysql(df=df, db_name=db_name, table_name=collection_name, drop_duplicates=True,)
-            except Exception as e:
-                print(name, e)
             i += 1

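The rewritten upload path drops the per-file dtype casting and simply normalizes empty values before handing the frame to `m.df_to_mysql`. A minimal standalone sketch of that cleanup (the column names are made up for illustration):

```python
import pandas as pd

# Hypothetical two-column frame; mirrors the fillna + empty-string pass above.
df = pd.DataFrame({'sales': ['', '12.5', None], 'orders': [3, '', 7]})
df.fillna(0, inplace=True)  # NaN -> 0
for col in df.columns.tolist():
    df[col] = df[col].apply(lambda x: 0 if str(x) == '' else x)  # '' -> 0
print(df)
```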
@@ -1282,13 +1270,13 @@ if __name__ == '__main__':
     # )

     # test()
-    col = 0
+    col = 1
     if col:
         # upload a directory to the specified database
-        db_name = '生意参谋3'
-        table_name = '店铺流量来源构成'
+        db_name = '京东数据3'
+        table_name = '推广数据_全站营销'
         upload_dir(
-            path=r'/Users/xigua/数据中心/原始文件3/生意参谋/店铺流量来源',
+            path=r'/Users/xigua/数据中心/原始文件3/京东报表/京准通_全站营销',
            db_name=db_name,
            collection_name=table_name,
        )
@@ -2409,11 +2409,15 @@ def main(days=100, months=3):


 if __name__ == '__main__':
-    main(days=100, months=3)
+    # main(days=100, months=3)

     # data_aggregation(
     #     months=3,
     #     is_juhe=True,  # build the aggregate tables
     #     # less_dict=['天猫_品销宝账户报表'],  # aggregate a single database only
     # )
-
+    data_aggregation(
+        months=1,
+        is_juhe=True,  # build the aggregate tables
+        # less_dict=['天猫_品销宝账户报表'],  # aggregate a single database only
+    )
mdbq/config/products.py CHANGED
@@ -141,7 +141,7 @@ class Products:
             dict_data=dict_data,
             # icm_update=['日期', '店铺名称', '宝贝id'],  # unique composite key
             unique_main_key=['商品id'],
-            set_type={
+            set_typ={
                 '商品id': 'mediumtext',
                 '平台': 'mediumtext',
                 '上市年份': 'mediumtext',
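This call site is updated for the `set_type` → `set_typ` keyword rename in `dict_to_mysql` (see mdbq/mysql/mysql.py below). A hedged sketch of a call using the renamed keyword; the values and the `m` instance are illustrative, not taken from the package:

```python
# Illustrative only: `m` is assumed to be a configured mysql.MysqlUpload instance,
# and the db/table names are hypothetical.
m.dict_to_mysql(
    db_name='属性设置3',
    table_name='商品主信息',
    dict_data={'商品id': '12345', '平台': '天猫'},
    unique_main_key=['商品id'],
    set_typ={'商品id': 'mediumtext', '平台': 'mediumtext'},
)
```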
mdbq/mysql/mysql.py CHANGED
@@ -133,7 +133,7 @@ class MysqlUpload:
         return wrapper

     @try_except
-    def dict_to_mysql(self, db_name, table_name, dict_data, icm_update=None, main_key=None, unique_main_key=None, index_length=100, set_type=None):
+    def dict_to_mysql(self, db_name, table_name, dict_data, icm_update=None, main_key=None, unique_main_key=None, index_length=100, set_typ=None):
         """
         Insert dict data
         dict_data: dict
@@ -141,7 +141,7 @@ class MysqlUpload:
         unique_main_key: columns to use as the unique index
         index_length: index length
         icm_update: incremental correction; when set, main_key is only used to check/create columns and cannot update data
-        set_type: {}
+        set_typ: {}
         """
         if not main_key:
             main_key = []
@@ -177,8 +177,11 @@ class MysqlUpload:

         # derive the data types from the values in dict_data
         dtypes, dict_data = self.cover_dict_dtypes(dict_data=dict_data)  # {'店铺名称': 'mediumtext',...}
-        if set_type:
-            dtypes.update(set_type)  # custom data types
+        if set_typ:
+            # apply the custom column data types
+            for k, v in dtypes.items():
+                # only apply keys of set_typ that exist among the actual df columns
+                [dtypes.update({k: inside_v}) for inside_k, inside_v in set_typ.items() if k == inside_k]

         # check the columns
         sql = "SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = %s AND TABLE_NAME = %s;"
@@ -373,17 +376,17 @@ class MysqlUpload:
         cols = df.columns.tolist()
         for col in cols:
             df[col] = df[col].apply(lambda x: float(re.sub(r'%$', '', str(x))) / 100 if (
-                str(x) != '' and str(x).endswith('%')) else '0.0' if str(x) == '0%' else x)
+                str(x) != '' and str(x).endswith('%')) and not re.findall('[\\u4e00-\\u9fa5]', str(x)) else '0.0' if str(x) == '0%' else x)
             try:
                 df[col] = df[col].apply(
                     lambda x: int(x) if '_' not in str(x) and '.' not in str(x) else x)  # try integer conversion when there is no decimal point
             except:
                 pass
-            if df[col].dtype == 'object':
-                try:
+            try:
+                if df[col].dtype == 'object':  # some columns are not dtype-inferred by pandas and may lack a dtype attribute
                     df[col] = df[col].apply(lambda x: float(x) if '.' in str(x) and '_' not in str(x) else x)
-                except:
-                    pass
+            except:
+                pass
             new_col = col.lower()
             new_col = re.sub(r'[()\-,,&~^、 ()\"\'“”=·/。》《><!!`]', '_', new_col, re.IGNORECASE)
             new_col = new_col.replace(')', '')
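The only change in the percent-normalization lambda is the new CJK guard: values containing Chinese characters are no longer converted even if they end in `%`. The rule in isolation, as a named function (a sketch of the same logic):

```python
import re

def percent_to_float(x):
    # Mirrors the guarded branch above: trailing-'%' strings become floats,
    # unless the value contains CJK characters, in which case it is left untouched.
    s = str(x)
    if s != '' and s.endswith('%') and not re.findall('[\u4e00-\u9fa5]', s):
        return float(re.sub(r'%$', '', s)) / 100
    return x

print(percent_to_float('12.5%'))   # 0.125
print(percent_to_float('优惠15%'))  # returned unchanged
```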
@@ -429,7 +432,7 @@ class MysqlUpload:
             __res_dict.update({k: 'varchar(255)'})
         return __res_dict, df

-    @try_except
+    # @try_except
     def df_to_mysql(self, df, db_name, table_name, set_typ=None, icm_update=[], move_insert=False, df_sql=False, drop_duplicates=False,
                     filename=None, count=None, reset_id=False):
         """
@@ -529,18 +532,19 @@ class MysqlUpload:
                     chunksize=1000
                 )
                 if reset_id:
-                    # 6. reset the auto-increment column
-                    try:
-                        cursor.execute(f"SHOW COLUMNS FROM {table_name} LIKE 'id'")
-                        result = cursor.fetchone()
-                        if result:
-                            cursor.execute(f"ALTER TABLE {table_name} DROP COLUMN id;")  # drop the id column
-                            cursor.execute(
-                                f"ALTER TABLE {table_name} ADD column id INT AUTO_INCREMENT PRIMARY KEY FIRST;")
-                            cursor.execute(f"ALTER TABLE {table_name} AUTO_INCREMENT = 1")  # restart auto-increment at 1
-                    except Exception as e:
-                        print(f'{e}')
-                        connection.rollback()
+                    pass
+                    # # 6. reset the auto-increment column
+                    # try:
+                    #     cursor.execute(f"SHOW COLUMNS FROM {table_name} LIKE 'id'")
+                    #     result = cursor.fetchone()
+                    #     if result:
+                    #         cursor.execute(f"ALTER TABLE {table_name} DROP COLUMN id;")  # drop the id column
+                    #         cursor.execute(
+                    #             f"ALTER TABLE {table_name} ADD column id INT AUTO_INCREMENT PRIMARY KEY FIRST;")
+                    #         cursor.execute(f"ALTER TABLE {table_name} AUTO_INCREMENT = 1")  # restart auto-increment at 1
+                    # except Exception as e:
+                    #     print(f'{e}')
+                    #     connection.rollback()
                 connection.commit()  # commit the transaction
                 connection.close()
                 return
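The disabled block rebuilt the `id` column from scratch after each upload. As a standalone sketch (pymysql assumed, since the package connects through it elsewhere; `conn` and `table_name` are placeholders), it was doing the equivalent of:

```python
import pymysql  # assumed driver

def reset_auto_increment(conn, table_name):
    # Drop and re-create the id column so the auto-increment sequence restarts at 1.
    with conn.cursor() as cursor:
        cursor.execute(f"SHOW COLUMNS FROM {table_name} LIKE 'id'")
        if cursor.fetchone():
            cursor.execute(f"ALTER TABLE {table_name} DROP COLUMN id;")
            cursor.execute(f"ALTER TABLE {table_name} ADD COLUMN id INT AUTO_INCREMENT PRIMARY KEY FIRST;")
            cursor.execute(f"ALTER TABLE {table_name} AUTO_INCREMENT = 1")
    conn.commit()
```

On MySQL, dropping and re-adding a column this way typically forces a full table rebuild, so skipping it on every upload avoids a costly operation on large tables.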
@@ -567,17 +571,18 @@ class MysqlUpload:
             )
             # 6. reset the auto-increment column
             if reset_id:
-                try:
-                    cursor.execute(f"SHOW COLUMNS FROM {table_name} LIKE 'id'")
-                    result = cursor.fetchone()
-                    if result:
-                        cursor.execute(f"ALTER TABLE {table_name} DROP COLUMN id;")  # drop the id column
-                        cursor.execute(
-                            f"ALTER TABLE {table_name} ADD column id INT AUTO_INCREMENT PRIMARY KEY FIRST;")
-                        cursor.execute(f"ALTER TABLE {table_name} AUTO_INCREMENT = 1")  # restart auto-increment at 1
-                except Exception as e:
-                    print(f'{e}')
-                    connection.rollback()
+                pass
+                # try:
+                #     cursor.execute(f"SHOW COLUMNS FROM {table_name} LIKE 'id'")
+                #     result = cursor.fetchone()
+                #     if result:
+                #         cursor.execute(f"ALTER TABLE {table_name} DROP COLUMN id;")  # drop the id column
+                #         cursor.execute(
+                #             f"ALTER TABLE {table_name} ADD column id INT AUTO_INCREMENT PRIMARY KEY FIRST;")
+                #         cursor.execute(f"ALTER TABLE {table_name} AUTO_INCREMENT = 1")  # restart auto-increment at 1
+                # except Exception as e:
+                #     print(f'{e}')
+                #     connection.rollback()
             connection.close()
             return

@@ -586,6 +591,9 @@ class MysqlUpload:
         # data is the incoming payload to process, not data already in the database
         # data example: {'日期': Timestamp('2024-08-27 00:00:00'), '推广费余额': 33299, '品销宝余额': 2930.73, '短信剩余': 67471}
         try:
+            cols = ', '.join(f"`{item}`" for item in data.keys())  # column names must be escaped
+            # data.update({item: f"{data[item]}" for item in data.keys()})  # cast all values to strings; not required
+            values = ', '.join([f'"{item}"' for item in data.values()])  # values must be quoted
             condition = []
             for k, v in data.items():
                 condition += [f'`{k}` = "{v}"']
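The new `cols`/`values` lines prepare a backtick-escaped column list and a quoted value list for the INSERT that follows. In isolation (a sketch; `my_table` is a placeholder):

```python
data = {'日期': '2024-08-27', '推广费余额': 33299}
cols = ', '.join(f"`{k}`" for k in data.keys())      # `日期`, `推广费余额`
values = ', '.join(f'"{v}"' for v in data.values())  # "2024-08-27", "33299"
sql = f"INSERT INTO `my_table` ({cols}) VALUES ({values});"
print(sql)  # INSERT INTO `my_table` (`日期`, `推广费余额`) VALUES ("2024-08-27", "33299");
```

Note that interpolating values directly into SQL like this is injection-prone if the data is untrusted; `cursor.execute(sql, params)` with placeholders is the safer pattern.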
@@ -667,18 +675,18 @@ class MysqlUpload:
                     print(f'mysql -> df_to_mysql 报错: {e}, {self.filename}')
                     # breakpoint()

-        # 6. reset the auto-increment column
-        try:
-            cursor.execute(f"SHOW COLUMNS FROM {table_name} LIKE 'id'")
-            result = cursor.fetchone()
-            if result:
-                cursor.execute(f"ALTER TABLE {table_name} DROP COLUMN id;")  # drop the id column
-                cursor.execute(
-                    f"ALTER TABLE {table_name} ADD column id INT AUTO_INCREMENT PRIMARY KEY FIRST;")
-                cursor.execute(f"ALTER TABLE {table_name} AUTO_INCREMENT = 1")  # restart auto-increment at 1
-        except Exception as e:
-            print(f'{e}')
-            connection.rollback()
+        # # 6. reset the auto-increment column
+        # try:
+        #     cursor.execute(f"SHOW COLUMNS FROM {table_name} LIKE 'id'")
+        #     result = cursor.fetchone()
+        #     if result:
+        #         cursor.execute(f"ALTER TABLE {table_name} DROP COLUMN id;")  # drop the id column
+        #         cursor.execute(
+        #             f"ALTER TABLE {table_name} ADD column id INT AUTO_INCREMENT PRIMARY KEY FIRST;")
+        #         cursor.execute(f"ALTER TABLE {table_name} AUTO_INCREMENT = 1")  # restart auto-increment at 1
+        # except Exception as e:
+        #     print(f'{e}')
+        #     connection.rollback()
         connection.commit()  # commit the transaction
         connection.close()

@@ -908,18 +916,18 @@ class OptimizeDatas:
             else:  # no date column present
                 self.delete_duplicate2(table_name=table_name)

-            # 5. reset the auto-increment (id) column
-            try:
-                cursor.execute(f"SHOW COLUMNS FROM {table_name} LIKE 'id'")
-                result = cursor.fetchone()
-                if result:
-                    cursor.execute(f"ALTER TABLE {table_name} DROP COLUMN id;")  # drop the id column
-                    cursor.execute(
-                        f"ALTER TABLE {table_name} ADD column id INT AUTO_INCREMENT PRIMARY KEY FIRST;")
-                    cursor.execute(f"ALTER TABLE {table_name} AUTO_INCREMENT = 1")  # restart auto-increment at 1
-            except Exception as e:
-                print(f'{e}')
-                self.connection.rollback()
+            # # 5. reset the auto-increment (id) column
+            # try:
+            #     cursor.execute(f"SHOW COLUMNS FROM {table_name} LIKE 'id'")
+            #     result = cursor.fetchone()
+            #     if result:
+            #         cursor.execute(f"ALTER TABLE {table_name} DROP COLUMN id;")  # drop the id column
+            #         cursor.execute(
+            #             f"ALTER TABLE {table_name} ADD column id INT AUTO_INCREMENT PRIMARY KEY FIRST;")
+            #         cursor.execute(f"ALTER TABLE {table_name} AUTO_INCREMENT = 1")  # restart auto-increment at 1
+            # except Exception as e:
+            #     print(f'{e}')
+            #     self.connection.rollback()
         self.connection.close()
         now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S ")
         print(f'{now}mysql({self.host}: {self.port}) {self.db_name} 数据库优化完成!')
mdbq/spider/aikucun.py CHANGED
@@ -20,6 +20,9 @@ from selenium.webdriver.common.keys import Keys
 from mdbq.aggregation import aggregation
 from mdbq.clean import data_clean
 from mdbq.other import ua_sj
+from mdbq.mysql import mysql
+from mdbq.config import myconfig
+import socket

 warnings.filterwarnings('ignore')

@@ -44,6 +47,23 @@ else:
     Share_Path = str(pathlib.Path('/Volumes/时尚事业部/01.运营部/天猫报表'))  # shared-file root directory
     Source_Path = str(pathlib.Path(Data_Path, '原始文件2'))
     upload_path = os.path.join(D_PATH, '数据上传中心')  # this directory lives in the Downloads folder
+if socket.gethostname() == 'company' or socket.gethostname() == 'Mac2.local':
+    conf = myconfig.main()
+    conf_data = conf['Windows']['company']['mysql']['local']
+    username, password, host, port = conf_data['username'], conf_data['password'], conf_data['host'], conf_data[
+        'port']
+else:
+    conf = myconfig.main()
+    conf_data = conf['Windows']['xigua_lx']['mysql']['local']
+    username, password, host, port = conf_data['username'], conf_data['password'], conf_data['host'], conf_data[
+        'port']
+m_engine = mysql.MysqlUpload(
+    username=username,
+    password=password,
+    host=host,
+    port=port,
+    charset='utf8mb4'
+)


 def get_cookie_aikucun():
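Both branches of the new hostname check read the same nested config shape and differ only in the account key. A compact equivalent (a sketch with `conf` mocked, since `myconfig.main()` returns the real credentials):

```python
import socket

# Mocked config with the same nesting as the block above; values are placeholders.
conf = {'Windows': {acct: {'mysql': {'local': {
    'username': 'user', 'password': 'pass', 'host': '127.0.0.1', 'port': 3306}}}
    for acct in ('company', 'xigua_lx')}}

account = 'company' if socket.gethostname() in ('company', 'Mac2.local') else 'xigua_lx'
conf_data = conf['Windows'][account]['mysql']['local']
username, password, host, port = (conf_data[k] for k in ('username', 'password', 'host', 'port'))
```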
@@ -262,6 +282,12 @@ class AikuCun:
         _driver.quit()

     def clean_data(self, date):
+        set_typ = {
+            '店铺名称': 'varchar(100)',
+            'spu_id': 'varchar(100)',
+            '图片': 'varchar(255)',
+            '数据更新时间': 'timestamp',
+        }
         for root, dirs, files in os.walk(upload_path, topdown=False):
             for name in files:
                 if '~$' in name or 'DS_Store' in name:
@@ -285,6 +311,21 @@ class AikuCun:
                 # df['数据更新时间'] = df['数据更新时间'].apply(lambda x: re.sub(' ', ' ', str(x)) if x else x)
                 # print(df['数据更新时间'])
                 # breakpoint()
+
+                m_engine.df_to_mysql(
+                    df=df,
+                    db_name='爱库存2',
+                    table_name='商品spu榜单',
+                    icm_update=[],  # incremental update; used in aggregation only, do not use elsewhere
+                    move_insert=False,  # delete first, then insert
+                    df_sql=True,  # when True, upload the whole table via df.to_sql, without deduplication
+                    drop_duplicates=False,  # when True, check for duplicates before inserting (slower); otherwise upload directly
+                    count=None,
+                    filename=None,  # used to track processing progress
+                    reset_id=False,  # whether to reset the auto-increment column
+                    set_typ=set_typ,
+                )
+
                 new_name = f'爱库存_商品榜单_spu_{date}_{date}.csv'
                 df.to_csv(os.path.join(root, new_name), encoding='utf-8_sig', index=False)
                 os.remove(os.path.join(root, name))
mdbq-2.9.7.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: mdbq
-Version: 2.9.5
+Version: 2.9.7
 Home-page: https://pypi.org/project/mdbq
 Author: xigua,
 Author-email: 2587125111@qq.com
mdbq-2.9.7.dist-info/RECORD CHANGED
@@ -1,11 +1,11 @@
 mdbq/__init__.py,sha256=Il5Q9ATdX8yXqVxtP_nYqUhExzxPC_qk_WXQ_4h0exg,16
 mdbq/__version__.py,sha256=y9Mp_8x0BCZSHsdLT_q5tX9wZwd5QgqrSIENLrb6vXA,62
 mdbq/aggregation/__init__.py,sha256=EeDqX2Aml6SPx8363J-v1lz0EcZtgwIBYyCJV6CcEDU,40
-mdbq/aggregation/aggregation.py,sha256=3d_sx-cFrW-c03D5Ry9jf144Ph3d0znIl3IHmnInsYA,73902
+mdbq/aggregation/aggregation.py,sha256=2KCVXZygQt4xVxGbFcDMBpL3PukY4yQF_uI-qLSTWaU,73460
 mdbq/aggregation/df_types.py,sha256=U9i3q2eRPTDY8qAPTw7irzu-Tlg4CIySW9uYro81wdk,8125
 mdbq/aggregation/mysql_types.py,sha256=YTGyrF9vcRgfkQbpT-e-JdJ7c7VF1dDHgyx9YZRES8w,10934
 mdbq/aggregation/optimize_data.py,sha256=79uwiM2WqNNFxGpE2wKz742PRq-ZGgFjdOV0vgptHdY,3513
-mdbq/aggregation/query_data.py,sha256=0kiJQv7xLeH7kXxPmMiUUPYIlt5gcEyzSETmJTV372U,103891
+mdbq/aggregation/query_data.py,sha256=r1FU0C4zjXln7oVSrRkElh4Ehl-9mYhGcq57jLbViUA,104071
 mdbq/bdup/__init__.py,sha256=AkhsGk81SkG1c8FqDH5tRq-8MZmFobVbN60DTyukYTY,28
 mdbq/bdup/bdup.py,sha256=LAV0TgnQpc-LB-YuJthxb0U42_VkPidzQzAagan46lU,4234
 mdbq/clean/__init__.py,sha256=A1d6x3L27j4NtLgiFV5TANwEkLuaDfPHDQNrPBbNWtU,41
@@ -16,7 +16,7 @@ mdbq/company/copysh.py,sha256=eFu6focRqm2Njn_XN1KW2ZYJiTv6EYgsdBCLokobyxQ,21572
 mdbq/config/__init__.py,sha256=jso1oHcy6cJEfa7udS_9uO5X6kZLoPBF8l3wCYmr5dM,18
 mdbq/config/get_myconf.py,sha256=cmNvsyoNa0RbZ9FOTjSd3jyyGwkxjUo0phvdHbGlrms,6010
 mdbq/config/myconfig.py,sha256=EGymTlAimtHIDJ9egCtOehBEPOj6rea504kvsEZu64o,854
-mdbq/config/products.py,sha256=StTRtphOmFccWxmb92lbIdQC6z83DpJVgYuVc4W7Rog,6296
+mdbq/config/products.py,sha256=ykvoQiA4OvFEYQ35wmCkREECdz0xIJzIs-Xix9mFpYI,6295
 mdbq/config/set_support.py,sha256=xkZCX6y9Bq1ppBpJAofld4B2YtchA7fl0eT3dx3CrSI,777
 mdbq/config/update_conf.py,sha256=taL3ZqKgiVWwUrDFuaYhim9a72Hm4BHRhhDscJTziR8,4535
 mdbq/dataframe/__init__.py,sha256=2HtCN8AdRj53teXDqzysC1h8aPL-mMFy561ESmhehGQ,22
@@ -26,7 +26,7 @@ mdbq/log/mylogger.py,sha256=oaT7Bp-Hb9jZt52seP3ISUuxVcI19s4UiqTeouScBO0,3258
 mdbq/mongo/__init__.py,sha256=SILt7xMtQIQl_m-ik9WLtJSXIVf424iYgCfE_tnQFbw,13
 mdbq/mongo/mongo.py,sha256=v9qvrp6p1ZRWuPpbSilqveiE0FEcZF7U5xUPI0RN4xs,31880
 mdbq/mysql/__init__.py,sha256=A_DPJyAoEvTSFojiI2e94zP0FKtCkkwKP1kYUCSyQzo,11
-mdbq/mysql/mysql.py,sha256=Ivw2Ke4-4_oTZb4naB_kh4jP24MJUUAhdGViNx8LA1E,59276
+mdbq/mysql/mysql.py,sha256=O5pmD_RB8TRKuatqL9Runrlr5E4XrLpDsPsi0vW_e8c,60075
 mdbq/mysql/recheck_mysql.py,sha256=rgTpvDMWYTyEn7UQdlig-pdXDluTgiU8JG6lkMh8DV0,8665
 mdbq/mysql/s_query.py,sha256=bgNNIqYLDCHjD5KTFcm6x4u74selpAGs5ouJYuqX86k,8447
 mdbq/mysql/year_month_day.py,sha256=VgewoE2pJxK7ErjfviL_SMTN77ki8GVbTUcao3vFUCE,1523
@@ -42,8 +42,8 @@ mdbq/pbix/refresh_all_old.py,sha256=_pq3WSQ728GPtEG5pfsZI2uTJhU8D6ra-htIk1JXYzw,
 mdbq/req_post/__init__.py,sha256=jso1oHcy6cJEfa7udS_9uO5X6kZLoPBF8l3wCYmr5dM,18
 mdbq/req_post/req_tb.py,sha256=qg7pet73IgKGmCwxaeUyImJIoeK_pBQT9BBKD7fkBNg,36160
 mdbq/spider/__init__.py,sha256=RBMFXGy_jd1HXZhngB2T2XTvJqki8P_Fr-pBcwijnew,18
-mdbq/spider/aikucun.py,sha256=jHrdGWBJQaSywx7V-U4YuM6vWkwC5SR5tTOOdB3YU_c,17306
-mdbq-2.9.5.dist-info/METADATA,sha256=kXmviy083bUKwNHey1JZepeXQYgRTnz_ahy6pegjzJg,243
-mdbq-2.9.5.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
-mdbq-2.9.5.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
-mdbq-2.9.5.dist-info/RECORD,,
+mdbq/spider/aikucun.py,sha256=01qJo_Di5Kmi2lG5_HKb0OI283b1-Pgqh-nnA0pX4TY,19038
+mdbq-2.9.7.dist-info/METADATA,sha256=ekYpjMjTHFv6a4rLxf9LiRjwZC49l8XpNQdkXD2shQM,243
+mdbq-2.9.7.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
+mdbq-2.9.7.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
+mdbq-2.9.7.dist-info/RECORD,,