mdbq 2.7.6__py3-none-any.whl → 2.7.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -439,6 +439,7 @@ class DataClean:
  # 品销宝
  sheets4 = ['账户', '推广计划', '推广单元', '创意', '品牌流量包', '定向人群'] # 品销宝
  file_name4 = os.path.splitext(name)[0] # 明星店铺报表
+ new_df = []
  for sheet4 in sheets4:
  df = pd.read_excel(os.path.join(root, name), sheet_name=sheet4, header=0, engine='openpyxl')
  if len(df) == 0:
@@ -452,8 +453,8 @@ class DataClean:
  shop_name = re.findall(r'明星店铺_([\u4e00-\u9fffA-Za-z]+店)', name)[0]
  df.insert(loc=1, column='店铺名称', value=shop_name)
  df.insert(loc=2, column='报表类型', value=sheet4)
- if '访客触达率' not in df.columns.tolist():
- df['访客触达率'] = '0'
+ # if '访客触达率' not in df.columns.tolist():
+ # df['访客触达率'] = '0'
  df.fillna(0, inplace=True)
  df['日期'] = pd.to_datetime(df['日期'], format='%Y-%m-%d', errors='ignore') # 转换日期列
  # min_clm = str(df['日期'].min()).split(' ')[0]
@@ -462,21 +463,12 @@ class DataClean:
  # 以sheet名进一步创建子文件夹
  # root_new = os.path.join(self.source_path, '推广报表/品销宝', sheet4)
  self.save_to_csv(df, upload_path, new_file_name4)
- if not db_name or not collection_name:
- continue
- # 一定要单独处理品销宝报表,因为 1 表包含 6个 sheet,都要添加
- self.datas.append(
- {
- '数据库名': db_name,
- '集合名称': collection_name,
- '数据主体': df,
- '文件名': name,
- }
- )
+ new_df.append(df)
+ df = pd.concat(new_df) # 品销宝 1 表有 6 个 sheet
  os.remove(os.path.join(root, name))

  # 将数据传入 self.datas 等待更新进数据库
- if not db_name or not collection_name or '品销宝' in collection_name:
+ if not db_name or not collection_name:
  # print(f'db_name/collection_name 不能为空')
  continue
  self.datas.append(
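
For context, the added lines above follow the usual pandas accumulate-then-concat idiom for multi-sheet workbooks: collect each non-empty sheet into a list, then concatenate once into a single frame. A minimal, self-contained sketch of that idiom is below; the function name read_workbook_sheets, the path argument, and the sheet list are illustrative placeholders, not identifiers from the package.

    import pandas as pd

    def read_workbook_sheets(path, sheet_names):
        """Read several sheets of one .xlsx workbook and return one combined DataFrame."""
        frames = []
        for sheet in sheet_names:
            # openpyxl handles .xlsx; header=0 takes the first row as column names
            df = pd.read_excel(path, sheet_name=sheet, header=0, engine='openpyxl')
            if len(df) == 0:
                continue  # skip empty sheets
            df['报表类型'] = sheet  # tag each row with the sheet it came from
            frames.append(df)
        # ignore_index renumbers rows; return an empty frame if every sheet was empty
        return pd.concat(frames, ignore_index=True) if frames else pd.DataFrame()

Collecting frames in a list and concatenating once is generally preferred over calling pd.concat inside the loop, since it avoids re-copying the accumulated data on every iteration.
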
@@ -1536,9 +1528,10 @@ def date_table():
  )


- def main(is_mysql=False):
+ def main(is_mysql=False, is_company=False):
  """
  is_mysql: 调试时加,False: 是否后续的聚合数据
+ is_company: 公司电脑不需要移动文件到原始文件
  """

  cn = DataClean(
@@ -1556,12 +1549,13 @@ def main(is_mysql=False):
  cn.sp_scene_clean(is_except=['except']) # 商品素材
  cn.upload_df() # 上传数据库

- cn.move_sycm(is_except=['临时文件', ]) # 生意参谋,移到文件到原始文件夹
- cn.move_dmp(is_except=['临时文件', ]) # 达摩盘
- cn.move_sjy(is_except=['临时文件',]) # 生意经,移到文件到原始文件夹
- cn.move_jd(is_except=['临时文件', ]) # 京东,移到文件到原始文件夹
- cn.move_tg_tm(is_except=['临时文件', ]) # 天猫,移到文件到原始文件夹
- cn.move_tg_tb(is_except=['临时文件', ]) # 淘宝店,移到文件到原始文件夹
+ if not is_company:
+ cn.move_sycm(is_except=['临时文件', ]) # 生意参谋,移到文件到原始文件夹
+ cn.move_dmp(is_except=['临时文件', ]) # 达摩盘
+ cn.move_sjy(is_except=['临时文件',]) # 生意经,移到文件到原始文件夹
+ cn.move_jd(is_except=['临时文件', ]) # 京东,移到文件到原始文件夹
+ cn.move_tg_tm(is_except=['临时文件', ]) # 天猫,移到文件到原始文件夹
+ cn.move_tg_tb(is_except=['临时文件', ]) # 淘宝店,移到文件到原始文件夹

  if not is_mysql:
  return
mdbq/company/copysh.py CHANGED
@@ -358,6 +358,7 @@ def main():

  clean_upload.main(
  is_mysql=True, # 调试时加,False: 是否后续的聚合数据及清理
+ is_company=True, # 公司电脑不需要移动文件到原始文件
  )

  #
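
To show how the two keyword arguments combine, a short usage sketch based on the call above and on the main(is_mysql=False, is_company=False) signature in clean_upload.py; the import path follows the package layout in the RECORD listing, everything else is illustrative.

    from mdbq.clean import clean_upload

    # Company machine: clean and upload to MySQL, but leave the source files where they are.
    clean_upload.main(is_mysql=True, is_company=True)

    # Personal machine: additionally move the processed exports back to the raw-file folders.
    clean_upload.main(is_mysql=True, is_company=False)
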
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: mdbq
- Version: 2.7.6
+ Version: 2.7.8
  Home-page: https://pypi.org/project/mdbq
  Author: xigua,
  Author-email: 2587125111@qq.com
@@ -9,10 +9,10 @@ mdbq/aggregation/query_data.py,sha256=QpfIWX197ICqRnRhLZoKgkX9CuWvx_ryfScSaFdDN-
  mdbq/bdup/__init__.py,sha256=AkhsGk81SkG1c8FqDH5tRq-8MZmFobVbN60DTyukYTY,28
  mdbq/bdup/bdup.py,sha256=LAV0TgnQpc-LB-YuJthxb0U42_VkPidzQzAagan46lU,4234
  mdbq/clean/__init__.py,sha256=A1d6x3L27j4NtLgiFV5TANwEkLuaDfPHDQNrPBbNWtU,41
- mdbq/clean/clean_upload.py,sha256=Mlt9h31Wa_VHhTG40jWioRqk8ELZxDEHMurLalYAJ-o,83627
+ mdbq/clean/clean_upload.py,sha256=rNiv_7wcOgYfks1wK2sLHWNyi7jProCZgHZSGjC7IlQ,83318
  mdbq/clean/data_clean.py,sha256=ucfslhqXVZoH2QaXHSAWDky0GhIvH9f4GeNaHg4SrFE,104790
  mdbq/company/__init__.py,sha256=qz8F_GsP_pMB5PblgJAUAMjasuZbOEp3qQOCB39E8f0,21
- mdbq/company/copysh.py,sha256=4VOAvHZfnhsKdeKL0Ho6q2x1OkD6nsZXRLyYdu8YAaE,18011
+ mdbq/company/copysh.py,sha256=E8OmCpXAIXwyfcgoT_H9aDf2bqod-gxjvBnOG0rkSss,18097
  mdbq/company/copysh_bak.py,sha256=NvlXCBZBcO2GIT5nLRYYqhOyHWM1-1RE7DHvgbj6jmQ,19723
  mdbq/company/home_sh.py,sha256=42CZ2tZIXHLl2mOl2gk2fZnjH2IHh1VJ1s3qHABjonY,18021
  mdbq/config/__init__.py,sha256=jso1oHcy6cJEfa7udS_9uO5X6kZLoPBF8l3wCYmr5dM,18
@@ -45,7 +45,7 @@ mdbq/req_post/__init__.py,sha256=jso1oHcy6cJEfa7udS_9uO5X6kZLoPBF8l3wCYmr5dM,18
  mdbq/req_post/req_tb.py,sha256=PexWSCPJNM6Tv0ol4lAWIhlOwsAr_frnjtcdSHCFiek,36179
  mdbq/spider/__init__.py,sha256=RBMFXGy_jd1HXZhngB2T2XTvJqki8P_Fr-pBcwijnew,18
  mdbq/spider/aikucun.py,sha256=4Y5zd64hZUFtll8AdpUc2napDas-La-A6XzAhb2mLv0,17157
- mdbq-2.7.6.dist-info/METADATA,sha256=g9yVclBoUQygTKxK2ciOGKeyaHjDsdw0um9CiD-HA-k,243
- mdbq-2.7.6.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
- mdbq-2.7.6.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
- mdbq-2.7.6.dist-info/RECORD,,
+ mdbq-2.7.8.dist-info/METADATA,sha256=bE6NNrtpM5wLHfYaQ3jTj4AplgS8laGquzmAYHlp3iI,243
+ mdbq-2.7.8.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
+ mdbq-2.7.8.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
+ mdbq-2.7.8.dist-info/RECORD,,