mdbq 2.3.2.tar.gz → 2.3.4.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. {mdbq-2.3.2 → mdbq-2.3.4}/PKG-INFO +1 -1
  2. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/aggregation/aggregation.py +10 -7
  3. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/aggregation/query_data.py +64 -0
  4. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/clean/data_clean.py +13 -1
  5. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/spider/aikucun.py +9 -2
  6. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq.egg-info/PKG-INFO +1 -1
  7. {mdbq-2.3.2 → mdbq-2.3.4}/setup.py +1 -1
  8. {mdbq-2.3.2 → mdbq-2.3.4}/README.txt +0 -0
  9. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/__init__.py +0 -0
  10. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/__version__.py +0 -0
  11. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/aggregation/__init__.py +0 -0
  12. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/aggregation/df_types.py +0 -0
  13. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/aggregation/mysql_types.py +0 -0
  14. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/aggregation/optimize_data.py +0 -0
  15. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/bdup/__init__.py +0 -0
  16. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/bdup/bdup.py +0 -0
  17. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/clean/__init__.py +0 -0
  18. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/company/__init__.py +0 -0
  19. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/company/copysh.py +0 -0
  20. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/company/home_sh.py +0 -0
  21. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/config/__init__.py +0 -0
  22. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/config/get_myconf.py +0 -0
  23. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/config/products.py +0 -0
  24. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/config/set_support.py +0 -0
  25. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/config/update_conf.py +0 -0
  26. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/dataframe/__init__.py +0 -0
  27. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/dataframe/converter.py +0 -0
  28. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/log/__init__.py +0 -0
  29. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/log/mylogger.py +0 -0
  30. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/mongo/__init__.py +0 -0
  31. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/mongo/mongo.py +0 -0
  32. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/mysql/__init__.py +0 -0
  33. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/mysql/mysql.py +0 -0
  34. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/mysql/s_query.py +0 -0
  35. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/mysql/year_month_day.py +0 -0
  36. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/other/__init__.py +0 -0
  37. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/other/porxy.py +0 -0
  38. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/other/pov_city.py +0 -0
  39. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/other/sku_picture.py +0 -0
  40. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/other/ua_sj.py +0 -0
  41. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/pbix/__init__.py +0 -0
  42. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/pbix/pbix_refresh.py +0 -0
  43. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/pbix/refresh_all.py +0 -0
  44. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/pbix/refresh_all_old.py +0 -0
  45. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/req_post/__init__.py +0 -0
  46. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/req_post/req_tb.py +0 -0
  47. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq/spider/__init__.py +0 -0
  48. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq.egg-info/SOURCES.txt +0 -0
  49. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq.egg-info/dependency_links.txt +0 -0
  50. {mdbq-2.3.2 → mdbq-2.3.4}/mdbq.egg-info/top_level.txt +0 -0
  51. {mdbq-2.3.2 → mdbq-2.3.4}/setup.cfg +0 -0
{mdbq-2.3.2 → mdbq-2.3.4}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: mdbq
- Version: 2.3.2
+ Version: 2.3.4
  Home-page: https://pypi.org/project/mdbsql
  Author: xigua,
  Author-email: 2587125111@qq.com
{mdbq-2.3.2 → mdbq-2.3.4}/mdbq/aggregation/aggregation.py
@@ -526,6 +526,9 @@ class DatabaseUpdate:
          elif name.endswith('.csv') and '竞店分析-来源分析-入店搜索词' in name:
              df = pd.read_csv(os.path.join(root, name), encoding='utf-8_sig', header=0, na_filter=False)
              check_remove_file = True
+         elif name.endswith('.csv') and '爱库存_商品榜单' in name:
+             df = pd.read_csv(os.path.join(root, name), encoding='utf-8_sig', header=0, na_filter=False)
+             check_remove_file = True
          # ----------------------- 京东数据处理分界线 -----------------------
          # ----------------------- 京东数据处理分界线 -----------------------
          elif name.endswith('.xlsx') and '店铺来源_流量来源' in name:
@@ -1296,13 +1299,13 @@ if __name__ == '__main__':
      username, password, host, port = get_myconf.select_config_values(target_service='nas', database='mysql')
      print(username, password, host, port)
      # file_dir(one_file=False, target_service='company')
-     one_file_to_mysql(
-         file='/Users/xigua/Downloads/爱库存_商品榜单_spu_2024-10-17_2024-10-17.csv',
-         db_name='爱库存2',
-         table_name='商品spu榜单',
-         target_service='company',
-         database='mysql'
-     )
+     # one_file_to_mysql(
+     #     file='/Users/xigua/Downloads/爱库存_商品榜单_spu_2024-10-17_2024-10-17.csv',
+     #     db_name='爱库存2',
+     #     table_name='商品spu榜单',
+     #     target_service='company',
+     #     database='mysql'
+     # )

      # db_name = '推广数据2'
      # table_name = '权益报表'
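For orientation: the new '爱库存_商品榜单' branch follows the same filename-keyword dispatch as the neighboring handlers. A minimal standalone sketch of that pattern (read_aikucun_csv is a hypothetical name, not part of mdbq):

    import os
    import pandas as pd

    def read_aikucun_csv(root, name):
        # Mirror of the added DatabaseUpdate branch: select a parser by
        # filename keyword, then read with the BOM-tolerant encoding the
        # module uses elsewhere.
        if name.endswith('.csv') and '爱库存_商品榜单' in name:
            return pd.read_csv(os.path.join(root, name), encoding='utf-8_sig',
                               header=0, na_filter=False)
        return None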
{mdbq-2.3.2 → mdbq-2.3.4}/mdbq/aggregation/query_data.py
@@ -688,6 +688,53 @@ class MysqlDatasQuery:
          df = pd.concat(_datas, axis=0, ignore_index=True)
          return df

+     def aikucun_bd_spu(self):
+         start_date, end_date = self.months_data(num=self.months)
+         projection = {
+             '日期': 1,
+             'spi_id': 1,
+             '商品名称': 1,
+             '品牌名称': 1,
+             '商品款号': 1,
+             '一级类目名称': 1,
+             '二级类目名称': 1,
+             '三级类目名称': 1,
+             '转发次数': 1,
+             '转发爱豆人数': 1,
+             '访客量': 1,
+             '浏览量': 1,
+             '下单gmv': 1,
+             '成交gmv': 1,
+             '供货额': 1,
+             '供货价': 1,
+             '销售爱豆人数_成交': 1,
+             '支付人数_交易': 1,
+             '支付人数_成交': 1,
+             '销售量_成交': 1,
+             '销售量_交易': 1,
+             '订单数_成交': 1,
+             '订单数_交易': 1,
+             '成交率_交易': 1,
+             '成交率_成交': 1,
+             '可售库存数': 1,
+             '售罄率': 1,
+             '在架sku数': 1,
+             '可售sku数': 1,
+             'sku数_交易': 1,
+             'sku数_成交': 1,
+             '营销后供货额': 1,
+             '营销后供货价': 1,
+         }
+         projection = {}
+         df = self.download.data_to_df(
+             db_name='爱库存2',
+             table_name='商品spu榜单',
+             start_date=start_date,
+             end_date=end_date,
+             projection=projection,
+         )
+         return df
+

  class GroupBy:
      """
@@ -822,6 +869,17 @@ class GroupBy:
              return df
          elif '商品索引表' in table_name:
              return df
+         elif '爱库存_商品spu榜单' in table_name:
+             df.drop_duplicates(
+                 subset=[
+                     '日期',
+                     'spu_id',
+                     '访客量',
+                     '浏览量',
+                     '下单gmv',
+                     '成交gmv',
+                 ], keep='last', inplace=True, ignore_index=True)
+             return df
          elif '人群报表' in table_name:
              df.rename(columns={
                  '场景名字': '营销场景',
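The dedup added above keeps one row per (日期, spu_id, 访客量, 浏览量, 下单gmv, 成交gmv) combination; keep='last' retains the most recently loaded copy and ignore_index=True renumbers the result. A small self-contained demonstration of the same call:

    import pandas as pd

    df = pd.DataFrame({
        '日期': ['2024-10-17', '2024-10-17'],
        'spu_id': [1, 1],
        '访客量': [10, 10],
        '浏览量': [20, 20],
        '下单gmv': [5.0, 5.0],
        '成交gmv': [3.0, 3.0],
    })
    df.drop_duplicates(
        subset=['日期', 'spu_id', '访客量', '浏览量', '下单gmv', '成交gmv'],
        keep='last', inplace=True, ignore_index=True)
    print(len(df))  # 1 -- the duplicate row was dropped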
@@ -1713,6 +1771,12 @@ def data_aggregation(service_databases=[{}], months=1):
              '唯一主键': [],
              '数据主体': sdq.tg_by_day(),
          },
+         {
+             '数据库名': '聚合数据',
+             '集合名': '爱库存_商品spu榜单',
+             '唯一主键': [],
+             '数据主体': sdq.aikucun_bd_spu(),
+         },
      ]
      for items in data_dict:  # 遍历返回结果
          db_name, table_name, unique_key_list, df = items['数据库名'], items['集合名'], items['唯一主键'], items['数据主体']
{mdbq-2.3.2 → mdbq-2.3.4}/mdbq/clean/data_clean.py
@@ -1142,11 +1142,23 @@ class DataClean:
          shutil.move(os.path.join(path, _name), t2)  # 将文件从下载文件夹移到目标位置

      # @try_except
-     def move_all(self, path=None):
+     def move_all(self, path=None, is_except=[]):
          if not path:
              path = self.path
          for root, dirs, files in os.walk(path, topdown=False):
              for name in files:
+                 print(name)
+                 is_continue = False
+                 if is_except:
+                     for item in is_except:
+                         print(item, f'-----', os.path.join(root, name))
+                         if item in os.path.join(root, name):
+                             # print(name)
+                             is_continue = True
+                             break
+                 if is_continue:  # 需要排除不做处理的文件或文件夹
+                     continue
+                 print(is_except, is_continue)
                  def bib(paths, _as_month=None):
                      """闭包函数"""
                      self.move_files(path=path, _name=name, target_path=paths, _as_month=_as_month)
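The new is_except guard in move_all reduces to a substring test over the joined path. A standalone sketch of the check (should_skip is a hypothetical helper, not part of DataClean):

    import os

    def should_skip(root, name, is_except):
        # Same test as the guard above: skip a file when any excluded
        # fragment (e.g. '临时文件') appears anywhere in its full path.
        full_path = os.path.join(root, name)
        return any(item in full_path for item in is_except)

    print(should_skip('/downloads/临时文件', 'a.csv', ['临时文件']))  # True
    print(should_skip('/downloads/reports', 'a.csv', ['临时文件']))   # False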
{mdbq-2.3.2 → mdbq-2.3.4}/mdbq/spider/aikucun.py
@@ -27,17 +27,20 @@ if platform.system() == 'Windows':
      Data_Path = r'C:\同步空间\BaiduSyncdisk'
      D_PATH = str(pathlib.Path(f'C:\\Users\\{getpass.getuser()}\\Downloads'))
      Share_Path = str(pathlib.Path(r'\\192.168.1.198\时尚事业部\01.运营部\天猫报表'))  # 共享文件根目录
+     Source_Path = str(pathlib.Path(Data_Path, '原始文件2'))
  elif platform.system() == 'Linux':
      Data_Path = '数据中心'
      D_PATH = 'Downloads'
      if not os.path.exists(D_PATH):
          os.makedirs(D_PATH)
      Share_Path = ''  # linux 通常是远程服务器,不需要访问共享
+     Source_Path = str(pathlib.Path(Data_Path, '原始文件2'))
  else:
      Data_Path = f'/Users/{getpass.getuser()}/数据中心'  # 使用Mac独立网络时
      # Data_Path = '/Volumes'  # 直接使用共享连接台式机时的配置, 后面接 + 自动0备份/***
      D_PATH = str(pathlib.Path(f'/Users/{getpass.getuser()}/Downloads'))
      Share_Path = str(pathlib.Path('/Volumes/时尚事业部/01.运营部/天猫报表'))  # 共享文件根目录
+     Source_Path = str(pathlib.Path(Data_Path, '原始文件2'))


  def test():
@@ -251,6 +254,10 @@ class AikuCun:
              if not pattern:
                  continue
              df = pd.read_csv(os.path.join(root, name), encoding='gb2312', header=0, na_filter=False)
+             if len(df) == 0:
+                 print(f'数据长度为 0 : {name}')
+                 os.remove(os.path.join(root, name))
+                 continue
              df.insert(loc=0, column='日期', value=date)  # df中插入新列
              df.rename(columns={'spuId': 'spu_id'}, inplace=True)
              df['数据更新时间'] = pd.to_datetime(df['数据更新时间'], format='%Y-%m-%d %H:%M:%S', errors='ignore')
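The added guard deletes zero-row downloads before any further processing, keeping empty CSVs from re-entering the pipeline. An equivalent standalone sketch (clean_one_csv is a hypothetical name):

    import os
    import pandas as pd

    def clean_one_csv(path):
        # Mirror of the added guard: remove and skip files whose CSV body
        # parses to zero rows.
        df = pd.read_csv(path, encoding='gb2312', header=0, na_filter=False)
        if len(df) == 0:
            print(f'数据长度为 0 : {os.path.basename(path)}')
            os.remove(path)
            return None
        return df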
@@ -264,7 +271,7 @@

  def akucun():
      akc = AikuCun()
-     akc.get_data(shop_name='aikucun', date_num=3)
+     akc.get_data(shop_name='aikucun', date_num=1)
      # akc.clean_data()

  # 新版 数据分类
@@ -284,7 +291,7 @@ def akucun():
      c.set_up_to_mysql = False  # 不再使用 data_clean 更新数据库,改为 aggregation.py
      c.new_unzip(is_move=True, )  # 解压文件
      c.change_and_sort(is_except=['临时文件'])
-     c.move_all()  # 移到文件到原始文件夹
+     c.move_all(is_except=['临时文件'])  # 移到文件到原始文件夹


  if __name__ == '__main__':
{mdbq-2.3.2 → mdbq-2.3.4}/mdbq.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: mdbq
- Version: 2.3.2
+ Version: 2.3.4
  Home-page: https://pypi.org/project/mdbsql
  Author: xigua,
  Author-email: 2587125111@qq.com
{mdbq-2.3.2 → mdbq-2.3.4}/setup.py
@@ -3,7 +3,7 @@
  from setuptools import setup, find_packages

  setup(name='mdbq',
-       version='2.3.2',
+       version='2.3.4',
        author='xigua, ',
        author_email="2587125111@qq.com",
        url='https://pypi.org/project/mdbsql',