mdbq-2.9.6-py3-none-any.whl → mdbq-2.9.7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
mdbq/aggregation/aggregation.py CHANGED
@@ -1150,51 +1150,39 @@ def upload_dir(path, db_name, collection_name, json_path=None):
                 df = pd.read_csv(os.path.join(root, name), encoding='utf-8_sig', header=0, na_filter=False)
             if name.endswith('.xlsx'):
                 df = pd.read_excel(os.path.join(root, name), sheet_name=0, header=0, engine='openpyxl')
-            try:
-                if len(df) == 0:
-                    continue
-                # if '新版' not in name:
-                #     continue
-                cv = converter.DataFrameConverter()
-                df = cv.convert_df_cols(df=df)  # clean column names and illegal characters in the df
-
-                try:
-                    df = df.astype(dtypes)  # update the df dtypes from the local file; may fail when columns differ
-                except Exception as e:
-                    print(name, e)
-                    # on failure, intersect the df columns with those recorded in the json
-                    old_dt = df.dtypes.apply(str).to_dict()  # dump the dataframe dtypes to a dict
-                    intersection_keys = dtypes.keys() & old_dt.keys()  # intersection of the two dicts' keys
-                    dtypes = {k: dtypes[k] for k in intersection_keys}  # build a new dict from the intersecting keys
-                    df = df.astype(dtypes)  # update the df dtypes again
-                df.fillna(0, inplace=True)
-
-                # for col in df.columns.tolist():
-                #     df[col] = df[col].apply(lambda x: 0 if str(x) == '' else x)
-                # print(f'{i}/{count}')
-                # sql_engine = create_engine(
-                #     f"mysql+pymysql://{username}:{password}@{host}:{port}/{db_name}")  # create the database engine
-                # df.to_sql(
-                #     name=collection_name,
-                #     con=sql_engine,
-                #     if_exists='append',
-                #     index=False,
-                #     chunksize=1000
-                # )
-
-                #
-                m.df_to_mysql(df=df, db_name=db_name, table_name=collection_name,
-                              move_insert=False,  # delete first, then insert
-                              df_sql=True,
-                              drop_duplicates=False,
-                              count=f'{i}/{count}',
-                              filename=name,
-                              )
-                # nas.df_to_mysql(df=df, db_name=db_name, table_name=collection_name, drop_duplicates=True,)
-            except Exception as e:
-                print(name, e)
+            # try:
+            if len(df) == 0:
+                continue
+            # if '新版' not in name:
+            #     continue
+            # cv = converter.DataFrameConverter()
+            # df = cv.convert_df_cols(df=df)  # clean column names and illegal characters in the df
+
+            # try:
+            #     df = df.astype(dtypes)  # update the df dtypes from the local file; may fail when columns differ
+            # except Exception as e:
+            #     print(name, e)
+            #     # on failure, intersect the df columns with those recorded in the json
+            #     old_dt = df.dtypes.apply(str).to_dict()  # dump the dataframe dtypes to a dict
+            #     intersection_keys = dtypes.keys() & old_dt.keys()  # intersection of the two dicts' keys
+            #     dtypes = {k: dtypes[k] for k in intersection_keys}  # build a new dict from the intersecting keys
+            #     df = df.astype(dtypes)  # update the df dtypes again
+            df.fillna(0, inplace=True)
+            for col in df.columns.tolist():
+                df[col] = df[col].apply(lambda x: 0 if str(x) == '' else x)
+
+            # if '更新时间' not in df.columns.tolist():
+            #     df['更新时间'] = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
+            #
+            m.df_to_mysql(df=df, db_name=db_name, table_name=collection_name,
+                          move_insert=False,  # delete first, then insert
+                          df_sql=True,
+                          drop_duplicates=False,
+                          count=f'{i}/{count}',
+                          filename=name,
+                          )
+            # nas.df_to_mysql(df=df, db_name=db_name, table_name=collection_name, drop_duplicates=True,)
             i += 1

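The rewritten upload_dir drops the dtype-coercion try/except in favor of a flat cleanup pass: fill NaN with 0, then zero out empty strings column by column. A minimal sketch of that pass on a standalone frame (sample data invented; the surrounding directory walk and the df_to_mysql upload are omitted):

```python
import pandas as pd

df = pd.DataFrame({'日期': ['2024-08-27', ''], '销售额': [100.5, None]})

df.fillna(0, inplace=True)  # NaN -> 0 in every column
for col in df.columns.tolist():
    # empty strings survive fillna, so they need their own pass
    df[col] = df[col].apply(lambda x: 0 if str(x) == '' else x)

print(df)  # the empty cell and the NaN both become 0
```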
 
@@ -1282,13 +1270,13 @@ if __name__ == '__main__':
     # )

     # test()
-    col = 0
+    col = 1
     if col:
         # upload a directory to the given database
-        db_name = '生意参谋3'
-        table_name = '店铺流量来源构成'
+        db_name = '京东数据3'
+        table_name = '推广数据_全站营销'
         upload_dir(
-            path=r'/Users/xigua/数据中心/原始文件3/生意参谋/店铺流量来源',
+            path=r'/Users/xigua/数据中心/原始文件3/京东报表/京准通_全站营销',
             db_name=db_name,
             collection_name=table_name,
         )
mdbq/aggregation/query_data.py CHANGED
@@ -2409,11 +2409,15 @@ def main(days=100, months=3):


 if __name__ == '__main__':
-    main(days=100, months=3)
+    # main(days=100, months=3)

     # data_aggregation(
     #     months=3,
     #     is_juhe=True,  # build the aggregate tables
     #     # less_dict=['天猫_品销宝账户报表'],  # aggregate a single database only
     # )
-
+    data_aggregation(
+        months=1,
+        is_juhe=True,  # build the aggregate tables
+        # less_dict=['天猫_品销宝账户报表'],  # aggregate a single database only
+    )
mdbq/mysql/mysql.py CHANGED
@@ -376,17 +376,17 @@ class MysqlUpload:
         cols = df.columns.tolist()
         for col in cols:
             df[col] = df[col].apply(lambda x: float(re.sub(r'%$', '', str(x))) / 100 if (
-                str(x) != '' and str(x).endswith('%')) else '0.0' if str(x) == '0%' else x)
+                str(x) != '' and str(x).endswith('%')) and not re.findall('[\\u4e00-\\u9fa5]', str(x)) else '0.0' if str(x) == '0%' else x)
             try:
                 df[col] = df[col].apply(
                     lambda x: int(x) if '_' not in str(x) and '.' not in str(x) else x)  # try int conversion when there is no decimal point
             except:
                 pass
-            if df[col].dtype == 'object':
-                try:
+            try:
+                if df[col].dtype == 'object':  # some columns are not typed by pandas and may lack a dtype attribute
                     df[col] = df[col].apply(lambda x: float(x) if '.' in str(x) and '_' not in str(x) else x)
-                except:
-                    pass
+            except:
+                pass
             new_col = col.lower()
             new_col = re.sub(r'[()\-,,&~^、 ()\"\'“”=·/。》《><!!`]', '_', new_col, re.IGNORECASE)
             new_col = new_col.replace(')', '')
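The reworked percent conversion adds a CJK guard: a value is only treated as a percentage when it ends in '%' and contains no Chinese characters, so mixed labels pass through unchanged. An isolated sketch of the guard (sample values invented; the dead '0%' branch of the original conditional is dropped for clarity):

```python
import re

def convert_percent(x):
    """Convert '35%' -> 0.35, but leave values containing CJK characters alone."""
    s = str(x)
    if s != '' and s.endswith('%') and not re.findall('[\u4e00-\u9fa5]', s):
        return float(re.sub(r'%$', '', s)) / 100
    return x

print(convert_percent('35%'))     # 0.35
print(convert_percent('增长5%'))  # unchanged: the CJK guard skips it
print(convert_percent(''))        # unchanged: empty strings are never converted
```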
@@ -432,7 +432,7 @@ class MysqlUpload:
                 __res_dict.update({k: 'varchar(255)'})
         return __res_dict, df

-    @try_except
+    # @try_except
     def df_to_mysql(self, df, db_name, table_name, set_typ=None, icm_update=[], move_insert=False, df_sql=False, drop_duplicates=False,
                     filename=None, count=None, reset_id=False):
         """
@@ -532,18 +532,19 @@ class MysqlUpload:
                     chunksize=1000
                 )
                 if reset_id:
-                    # 6. reset the auto-increment column
-                    try:
-                        cursor.execute(f"SHOW COLUMNS FROM {table_name} LIKE 'id'")
-                        result = cursor.fetchone()
-                        if result:
-                            cursor.execute(f"ALTER TABLE {table_name} DROP COLUMN id;")  # drop the id column
-                            cursor.execute(
-                                f"ALTER TABLE {table_name} ADD column id INT AUTO_INCREMENT PRIMARY KEY FIRST;")
-                            cursor.execute(f"ALTER TABLE {table_name} AUTO_INCREMENT = 1")  # restart auto-increment from 1
-                    except Exception as e:
-                        print(f'{e}')
-                        connection.rollback()
+                    pass
+                    # # 6. reset the auto-increment column
+                    # try:
+                    #     cursor.execute(f"SHOW COLUMNS FROM {table_name} LIKE 'id'")
+                    #     result = cursor.fetchone()
+                    #     if result:
+                    #         cursor.execute(f"ALTER TABLE {table_name} DROP COLUMN id;")  # drop the id column
+                    #         cursor.execute(
+                    #             f"ALTER TABLE {table_name} ADD column id INT AUTO_INCREMENT PRIMARY KEY FIRST;")
+                    #         cursor.execute(f"ALTER TABLE {table_name} AUTO_INCREMENT = 1")  # restart auto-increment from 1
+                    # except Exception as e:
+                    #     print(f'{e}')
+                    #     connection.rollback()
                 connection.commit()  # commit the transaction
                 connection.close()
                 return
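Every reset_id branch in this release is stubbed out with pass. For reference, here is what the disabled step did, as a standalone sketch against PyMySQL (connection handling simplified; table_name must come from trusted code because it is interpolated straight into DDL):

```python
import pymysql

def reset_id_column(connection, table_name: str) -> None:
    """Drop and recreate `id` as an AUTO_INCREMENT primary key, renumbering all rows."""
    try:
        with connection.cursor() as cursor:
            cursor.execute(f"SHOW COLUMNS FROM {table_name} LIKE 'id'")
            if cursor.fetchone():  # only rebuild when an id column already exists
                cursor.execute(f"ALTER TABLE {table_name} DROP COLUMN id;")
                cursor.execute(
                    f"ALTER TABLE {table_name} "
                    f"ADD COLUMN id INT AUTO_INCREMENT PRIMARY KEY FIRST;")
                cursor.execute(f"ALTER TABLE {table_name} AUTO_INCREMENT = 1")
        connection.commit()
    except Exception as e:
        print(e)
        connection.rollback()  # best-effort: ALTER TABLE autocommits in MySQL
```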
@@ -570,17 +571,18 @@ class MysqlUpload:
                 )
                 # 6. reset the auto-increment column
                 if reset_id:
-                    try:
-                        cursor.execute(f"SHOW COLUMNS FROM {table_name} LIKE 'id'")
-                        result = cursor.fetchone()
-                        if result:
-                            cursor.execute(f"ALTER TABLE {table_name} DROP COLUMN id;")  # drop the id column
-                            cursor.execute(
-                                f"ALTER TABLE {table_name} ADD column id INT AUTO_INCREMENT PRIMARY KEY FIRST;")
-                            cursor.execute(f"ALTER TABLE {table_name} AUTO_INCREMENT = 1")  # restart auto-increment from 1
-                    except Exception as e:
-                        print(f'{e}')
-                        connection.rollback()
+                    pass
+                    # try:
+                    #     cursor.execute(f"SHOW COLUMNS FROM {table_name} LIKE 'id'")
+                    #     result = cursor.fetchone()
+                    #     if result:
+                    #         cursor.execute(f"ALTER TABLE {table_name} DROP COLUMN id;")  # drop the id column
+                    #         cursor.execute(
+                    #             f"ALTER TABLE {table_name} ADD column id INT AUTO_INCREMENT PRIMARY KEY FIRST;")
+                    #         cursor.execute(f"ALTER TABLE {table_name} AUTO_INCREMENT = 1")  # restart auto-increment from 1
+                    # except Exception as e:
+                    #     print(f'{e}')
+                    #     connection.rollback()
                 connection.close()
                 return

@@ -589,6 +591,9 @@ class MysqlUpload:
         # data is the incoming payload to be processed, not rows from the database
         # data example: {'日期': Timestamp('2024-08-27 00:00:00'), '推广费余额': 33299, '品销宝余额': 2930.73, '短信剩余': 67471}
         try:
+            cols = ', '.join(f"`{item}`" for item in data.keys())  # column names must be escaped
+            # data.update({item: f"{data[item]}" for item in data.keys()})  # cast every value to str; not required
+            values = ', '.join([f'"{item}"' for item in data.values()])  # values must be quoted
             condition = []
             for k, v in data.items():
                 condition += [f'`{k}` = "{v}"']
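The three added lines prebuild backtick-escaped column names and double-quoted values; the hunk does not show the statement they feed, but the shapes fit a plain INSERT. A sketch of how they compose (the table name is made up; note this is naive string quoting, which a value containing '"' would break — parameterized queries avoid that):

```python
data = {'日期': '2024-08-27', '推广费余额': 33299, '品销宝余额': 2930.73}

cols = ', '.join(f"`{item}`" for item in data.keys())        # backtick-escape column names
values = ', '.join([f'"{item}"' for item in data.values()])  # double-quote every value

# `demo_table` is illustrative; the real statement is not visible in this hunk
sql = f"INSERT INTO `demo_table` ({cols}) VALUES ({values});"
print(sql)
# INSERT INTO `demo_table` (`日期`, `推广费余额`, `品销宝余额`) VALUES ("2024-08-27", "33299", "2930.73");
```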
@@ -670,18 +675,18 @@ class MysqlUpload:
                     print(f'mysql -> df_to_mysql 报错: {e}, {self.filename}')
                     # breakpoint()

-        # 6. reset the auto-increment column
-        try:
-            cursor.execute(f"SHOW COLUMNS FROM {table_name} LIKE 'id'")
-            result = cursor.fetchone()
-            if result:
-                cursor.execute(f"ALTER TABLE {table_name} DROP COLUMN id;")  # drop the id column
-                cursor.execute(
-                    f"ALTER TABLE {table_name} ADD column id INT AUTO_INCREMENT PRIMARY KEY FIRST;")
-                cursor.execute(f"ALTER TABLE {table_name} AUTO_INCREMENT = 1")  # restart auto-increment from 1
-        except Exception as e:
-            print(f'{e}')
-            connection.rollback()
+        # # 6. reset the auto-increment column
+        # try:
+        #     cursor.execute(f"SHOW COLUMNS FROM {table_name} LIKE 'id'")
+        #     result = cursor.fetchone()
+        #     if result:
+        #         cursor.execute(f"ALTER TABLE {table_name} DROP COLUMN id;")  # drop the id column
+        #         cursor.execute(
+        #             f"ALTER TABLE {table_name} ADD column id INT AUTO_INCREMENT PRIMARY KEY FIRST;")
+        #         cursor.execute(f"ALTER TABLE {table_name} AUTO_INCREMENT = 1")  # restart auto-increment from 1
+        # except Exception as e:
+        #     print(f'{e}')
+        #     connection.rollback()

        connection.commit()  # commit the transaction
        connection.close()

@@ -911,18 +916,18 @@ class OptimizeDatas:
             else:  # no date column exists
                 self.delete_duplicate2(table_name=table_name)

-            # 5. reset the auto-increment (id) column
-            try:
-                cursor.execute(f"SHOW COLUMNS FROM {table_name} LIKE 'id'")
-                result = cursor.fetchone()
-                if result:
-                    cursor.execute(f"ALTER TABLE {table_name} DROP COLUMN id;")  # drop the id column
-                    cursor.execute(
-                        f"ALTER TABLE {table_name} ADD column id INT AUTO_INCREMENT PRIMARY KEY FIRST;")
-                    cursor.execute(f"ALTER TABLE {table_name} AUTO_INCREMENT = 1")  # restart auto-increment from 1
-            except Exception as e:
-                print(f'{e}')
-                self.connection.rollback()
+            # # 5. reset the auto-increment (id) column
+            # try:
+            #     cursor.execute(f"SHOW COLUMNS FROM {table_name} LIKE 'id'")
+            #     result = cursor.fetchone()
+            #     if result:
+            #         cursor.execute(f"ALTER TABLE {table_name} DROP COLUMN id;")  # drop the id column
+            #         cursor.execute(
+            #             f"ALTER TABLE {table_name} ADD column id INT AUTO_INCREMENT PRIMARY KEY FIRST;")
+            #         cursor.execute(f"ALTER TABLE {table_name} AUTO_INCREMENT = 1")  # restart auto-increment from 1
+            # except Exception as e:
+            #     print(f'{e}')
+            #     self.connection.rollback()

         self.connection.close()
         now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S ")
         print(f'{now}mysql({self.host}: {self.port}) {self.db_name} 数据库优化完成!')
mdbq/spider/aikucun.py CHANGED
@@ -20,6 +20,9 @@ from selenium.webdriver.common.keys import Keys
 from mdbq.aggregation import aggregation
 from mdbq.clean import data_clean
 from mdbq.other import ua_sj
+from mdbq.mysql import mysql
+from mdbq.config import myconfig
+import socket

 warnings.filterwarnings('ignore')

@@ -44,6 +47,23 @@ else:
     Share_Path = str(pathlib.Path('/Volumes/时尚事业部/01.运营部/天猫报表'))  # root directory for shared files
     Source_Path = str(pathlib.Path(Data_Path, '原始文件2'))
     upload_path = os.path.join(D_PATH, '数据上传中心')  # this directory lives in the downloads folder
+if socket.gethostname() == 'company' or socket.gethostname() == 'Mac2.local':
+    conf = myconfig.main()
+    conf_data = conf['Windows']['company']['mysql']['local']
+    username, password, host, port = conf_data['username'], conf_data['password'], conf_data['host'], conf_data[
+        'port']
+else:
+    conf = myconfig.main()
+    conf_data = conf['Windows']['xigua_lx']['mysql']['local']
+    username, password, host, port = conf_data['username'], conf_data['password'], conf_data['host'], conf_data[
+        'port']
+m_engine = mysql.MysqlUpload(
+    username=username,
+    password=password,
+    host=host,
+    port=port,
+    charset='utf8mb4'
+)


 def get_cookie_aikucun():
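aikucun.py now builds a module-level MysqlUpload engine at import time, choosing credentials by hostname. The selection pattern in isolation, with a plain dict standing in for mdbq.config.myconfig (hostnames, keys, and values below are illustrative):

```python
import socket

# stand-in for conf = myconfig.main(); the real keys live in the mdbq config file
CONF = {
    'company':  {'username': 'u1', 'password': 'p1', 'host': '127.0.0.1',    'port': 3306},
    'xigua_lx': {'username': 'u2', 'password': 'p2', 'host': '192.168.1.10', 'port': 3306},
}

profile = 'company' if socket.gethostname() in ('company', 'Mac2.local') else 'xigua_lx'
conf_data = CONF[profile]
username, password, host, port = (conf_data['username'], conf_data['password'],
                                  conf_data['host'], conf_data['port'])
print(f'using {profile}: {username}@{host}:{port}')
```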
@@ -262,6 +282,12 @@ class AikuCun:
         _driver.quit()

     def clean_data(self, date):
+        set_typ = {
+            '店铺名称': 'varchar(100)',
+            'spu_id': 'varchar(100)',
+            '图片': 'varchar(255)',
+            '数据更新时间': 'timestamp',
+        }
         for root, dirs, files in os.walk(upload_path, topdown=False):
             for name in files:
                 if '~$' in name or 'DS_Store' in name:
@@ -285,6 +311,21 @@ class AikuCun:
                 # df['数据更新时间'] = df['数据更新时间'].apply(lambda x: re.sub(' ', ' ', str(x)) if x else x)
                 # print(df['数据更新时间'])
                 # breakpoint()
+
+                m_engine.df_to_mysql(
+                    df=df,
+                    db_name='爱库存2',
+                    table_name='商品spu榜单',
+                    icm_update=[],  # incremental update; used by the aggregation code only
+                    move_insert=False,  # delete first, then insert
+                    df_sql=True,  # when True, upload the whole table via df.to_sql, without deduplication
+                    drop_duplicates=False,  # when True, check for duplicates before inserting; slower
+                    count=None,
+                    filename=None,  # used to track progress
+                    reset_id=False,  # whether to reset the auto-increment column
+                    set_typ=set_typ,
+                )
+
                 new_name = f'爱库存_商品榜单_spu_{date}_{date}.csv'
                 df.to_csv(os.path.join(root, new_name), encoding='utf-8_sig', index=False)
                 os.remove(os.path.join(root, name))
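The set_typ mapping pins explicit MySQL column types instead of letting them be inferred per upload. The diff does not show how df_to_mysql consumes the mapping internally, but as an illustration, a dict like this translates naturally into DDL:

```python
set_typ = {
    '店铺名称': 'varchar(100)',
    'spu_id': 'varchar(100)',
    '图片': 'varchar(255)',
    '数据更新时间': 'timestamp',
}

# illustrative only: render the mapping as a CREATE TABLE statement
cols_ddl = ',\n  '.join(f'`{col}` {typ}' for col, typ in set_typ.items())
print(f'CREATE TABLE IF NOT EXISTS `商品spu榜单` (\n  {cols_ddl}\n);')
```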
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: mdbq
-Version: 2.9.6
+Version: 2.9.7
 Home-page: https://pypi.org/project/mdbq
 Author: xigua,
 Author-email: 2587125111@qq.com
@@ -1,11 +1,11 @@
 mdbq/__init__.py,sha256=Il5Q9ATdX8yXqVxtP_nYqUhExzxPC_qk_WXQ_4h0exg,16
 mdbq/__version__.py,sha256=y9Mp_8x0BCZSHsdLT_q5tX9wZwd5QgqrSIENLrb6vXA,62
 mdbq/aggregation/__init__.py,sha256=EeDqX2Aml6SPx8363J-v1lz0EcZtgwIBYyCJV6CcEDU,40
-mdbq/aggregation/aggregation.py,sha256=3d_sx-cFrW-c03D5Ry9jf144Ph3d0znIl3IHmnInsYA,73902
+mdbq/aggregation/aggregation.py,sha256=2KCVXZygQt4xVxGbFcDMBpL3PukY4yQF_uI-qLSTWaU,73460
 mdbq/aggregation/df_types.py,sha256=U9i3q2eRPTDY8qAPTw7irzu-Tlg4CIySW9uYro81wdk,8125
 mdbq/aggregation/mysql_types.py,sha256=YTGyrF9vcRgfkQbpT-e-JdJ7c7VF1dDHgyx9YZRES8w,10934
 mdbq/aggregation/optimize_data.py,sha256=79uwiM2WqNNFxGpE2wKz742PRq-ZGgFjdOV0vgptHdY,3513
-mdbq/aggregation/query_data.py,sha256=0kiJQv7xLeH7kXxPmMiUUPYIlt5gcEyzSETmJTV372U,103891
+mdbq/aggregation/query_data.py,sha256=r1FU0C4zjXln7oVSrRkElh4Ehl-9mYhGcq57jLbViUA,104071
 mdbq/bdup/__init__.py,sha256=AkhsGk81SkG1c8FqDH5tRq-8MZmFobVbN60DTyukYTY,28
 mdbq/bdup/bdup.py,sha256=LAV0TgnQpc-LB-YuJthxb0U42_VkPidzQzAagan46lU,4234
 mdbq/clean/__init__.py,sha256=A1d6x3L27j4NtLgiFV5TANwEkLuaDfPHDQNrPBbNWtU,41
@@ -26,7 +26,7 @@ mdbq/log/mylogger.py,sha256=oaT7Bp-Hb9jZt52seP3ISUuxVcI19s4UiqTeouScBO0,3258
 mdbq/mongo/__init__.py,sha256=SILt7xMtQIQl_m-ik9WLtJSXIVf424iYgCfE_tnQFbw,13
 mdbq/mongo/mongo.py,sha256=v9qvrp6p1ZRWuPpbSilqveiE0FEcZF7U5xUPI0RN4xs,31880
 mdbq/mysql/__init__.py,sha256=A_DPJyAoEvTSFojiI2e94zP0FKtCkkwKP1kYUCSyQzo,11
-mdbq/mysql/mysql.py,sha256=sfETcLkxeOjDn-B6-ShHRPrq8BUJA9Qmtj4zyIIvi3w,59499
+mdbq/mysql/mysql.py,sha256=O5pmD_RB8TRKuatqL9Runrlr5E4XrLpDsPsi0vW_e8c,60075
 mdbq/mysql/recheck_mysql.py,sha256=rgTpvDMWYTyEn7UQdlig-pdXDluTgiU8JG6lkMh8DV0,8665
 mdbq/mysql/s_query.py,sha256=bgNNIqYLDCHjD5KTFcm6x4u74selpAGs5ouJYuqX86k,8447
 mdbq/mysql/year_month_day.py,sha256=VgewoE2pJxK7ErjfviL_SMTN77ki8GVbTUcao3vFUCE,1523
@@ -42,8 +42,8 @@ mdbq/pbix/refresh_all_old.py,sha256=_pq3WSQ728GPtEG5pfsZI2uTJhU8D6ra-htIk1JXYzw,
 mdbq/req_post/__init__.py,sha256=jso1oHcy6cJEfa7udS_9uO5X6kZLoPBF8l3wCYmr5dM,18
 mdbq/req_post/req_tb.py,sha256=qg7pet73IgKGmCwxaeUyImJIoeK_pBQT9BBKD7fkBNg,36160
 mdbq/spider/__init__.py,sha256=RBMFXGy_jd1HXZhngB2T2XTvJqki8P_Fr-pBcwijnew,18
-mdbq/spider/aikucun.py,sha256=jHrdGWBJQaSywx7V-U4YuM6vWkwC5SR5tTOOdB3YU_c,17306
-mdbq-2.9.6.dist-info/METADATA,sha256=rYMAeGz0dY4meetEVC4l49GVUVRf3E9kzMqoLAn0GW4,243
-mdbq-2.9.6.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
-mdbq-2.9.6.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
-mdbq-2.9.6.dist-info/RECORD,,
+mdbq/spider/aikucun.py,sha256=01qJo_Di5Kmi2lG5_HKb0OI283b1-Pgqh-nnA0pX4TY,19038
+mdbq-2.9.7.dist-info/METADATA,sha256=ekYpjMjTHFv6a4rLxf9LiRjwZC49l8XpNQdkXD2shQM,243
+mdbq-2.9.7.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
+mdbq-2.9.7.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
+mdbq-2.9.7.dist-info/RECORD,,