mdbq 1.2.1__tar.gz → 1.2.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46) hide show
  1. {mdbq-1.2.1 → mdbq-1.2.3}/PKG-INFO +1 -1
  2. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/aggregation/aggregation.py +30 -4
  3. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/aggregation/mysql_types.py +2 -0
  4. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/mysql/mysql.py +3 -0
  5. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq.egg-info/PKG-INFO +1 -1
  6. {mdbq-1.2.1 → mdbq-1.2.3}/setup.py +1 -1
  7. {mdbq-1.2.1 → mdbq-1.2.3}/README.txt +0 -0
  8. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/__init__.py +0 -0
  9. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/__version__.py +0 -0
  10. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/aggregation/__init__.py +0 -0
  11. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/aggregation/df_types.py +0 -0
  12. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/aggregation/optimize_data.py +0 -0
  13. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/aggregation/query_data.py +0 -0
  14. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/bdup/__init__.py +0 -0
  15. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/bdup/bdup.py +0 -0
  16. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/clean/__init__.py +0 -0
  17. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/clean/data_clean.py +0 -0
  18. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/company/__init__.py +0 -0
  19. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/company/copysh.py +0 -0
  20. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/config/__init__.py +0 -0
  21. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/config/get_myconf.py +0 -0
  22. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/config/products.py +0 -0
  23. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/config/set_support.py +0 -0
  24. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/config/update_conf.py +0 -0
  25. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/dataframe/__init__.py +0 -0
  26. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/dataframe/converter.py +0 -0
  27. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/log/__init__.py +0 -0
  28. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/log/mylogger.py +0 -0
  29. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/mongo/__init__.py +0 -0
  30. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/mongo/mongo.py +0 -0
  31. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/mysql/__init__.py +0 -0
  32. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/mysql/data_types_即将删除.py +0 -0
  33. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/mysql/s_query.py +0 -0
  34. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/mysql/year_month_day.py +0 -0
  35. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/other/__init__.py +0 -0
  36. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/other/porxy.py +0 -0
  37. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/other/pov_city.py +0 -0
  38. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/other/ua_sj.py +0 -0
  39. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/pbix/__init__.py +0 -0
  40. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/pbix/pbix_refresh.py +0 -0
  41. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/pbix/refresh_all.py +0 -0
  42. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq/spider/__init__.py +0 -0
  43. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq.egg-info/SOURCES.txt +0 -0
  44. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq.egg-info/dependency_links.txt +0 -0
  45. {mdbq-1.2.1 → mdbq-1.2.3}/mdbq.egg-info/top_level.txt +0 -0
  46. {mdbq-1.2.1 → mdbq-1.2.3}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: mdbq
3
- Version: 1.2.1
3
+ Version: 1.2.3
4
4
  Home-page: https://pypi.org/project/mdbsql
5
5
  Author: xigua,
6
6
  Author-email: 2587125111@qq.com
@@ -438,6 +438,7 @@ class DatabaseUpdate:
438
438
  new_name = f'未分类_{date1}_全部渠道_商品明细.csv'
439
439
  df.rename(columns={'商品ID': '商品id'}, inplace=True)
440
440
  df.insert(loc=0, column='日期', value=date1)
441
+ df['最近上架时间'].loc[0] = df['最近上架时间'].loc[1] # 填充这一列, 避免上传 mysql 日期类型报错
441
442
  if 'sku' in new_name:
442
443
  db_name = '京东数据2'
443
444
  collection_name = 'sku_商品明细'
@@ -939,8 +940,13 @@ def file_dir(one_file=True):
939
940
  one_file: 值为 True 时每个文件夹取一个文件上传数据库,反之上传所有文件夹数据
940
941
  """
941
942
  filename = '文件目录对照表.csv'
942
- path = set_support.SetSupport(dirname='support').dirname # 根据平台自动适配 support 路径
943
- df = pd.read_csv(os.path.join(path, filename), encoding='utf-8_sig', header=0, na_filter=False)
943
+ if platform.system() == 'Windows':
944
+ path = 'C:\\同步空间\\BaiduSyncdisk\\原始文件2'
945
+ else:
946
+ path = '/Users/xigua/数据中心/原始文件2'
947
+
948
+ support_file = set_support.SetSupport(dirname='support').dirname
949
+ df = pd.read_csv(os.path.join(support_file, filename), encoding='utf-8_sig', header=0, na_filter=False)
944
950
  datas = df.to_dict('records') # 转字典
945
951
  for data in datas:
946
952
  # print(data)
@@ -973,10 +979,30 @@ def file_dir(one_file=True):
973
979
  data.update({'入库进度': 1}) # 更新进度为已上传
974
980
  # 将进度信息写回文件
975
981
  df = pd.DataFrame.from_dict(datas, orient='columns')
976
- df.to_csv(os.path.join(path, filename), encoding='utf-8_sig', index=False, header=True)
982
+ df.to_csv(os.path.join(support_file, filename), encoding='utf-8_sig', index=False, header=True)
983
+
984
+
985
+ def test():
986
+ path = '/Users/xigua/数据中心/原始文件2/京东报表/JD商品明细spu'
987
+ for root, dirs, files in os.walk(path, topdown=False):
988
+ for name in files:
989
+ if name.endswith('.csv') and 'baidu' not in name and '~' not in name:
990
+ df = pd.read_csv(os.path.join(root, name), encoding='utf-8_sig', header=0, na_filter=False)
991
+ df['最近上架时间'].loc[0] = df['最近上架时间'].loc[1]
992
+ # print(df[['日期', '最近上架时间']])
993
+ df.to_csv(os.path.join(root, name), encoding='utf-8_sig', index=False, header=True)
994
+ # break
995
+ # break
977
996
 
978
997
 
979
998
  if __name__ == '__main__':
980
999
  # username, password, host, port = get_myconf.select_config_values(target_service='nas', database='mysql')
981
1000
  # print(username, password, host, port)
982
- file_dir(one_file=True)
1001
+ file_dir(one_file=False)
1002
+ # one_file_to_mysql(
1003
+ # file='',
1004
+ # db_name='京东数据2',
1005
+ # table_name='商品词下排名',
1006
+ # target_service='home_lx',
1007
+ # database='mysql'
1008
+ # )
@@ -123,6 +123,7 @@ class DataTypes:
123
123
  sort_keys=True, # 默认为False。如果为True,则字典的输出将按键排序。
124
124
  indent=4,
125
125
  )
126
+ print(f'已更新 json 文件: {self.json_file}')
126
127
  time.sleep(1)
127
128
 
128
129
  def load_dtypes(self, db_name, table_name, cl='mysql', ):
@@ -236,5 +237,6 @@ def mysql_all_dtypes(db_name=None, table_name=None, path=None):
236
237
  # print(d.datas)
237
238
  d.as_json_file()
238
239
 
240
+
239
241
  if __name__ == '__main__':
240
242
  mysql_all_dtypes() # 更新 mysql 中所有数据库的 dtypes 信息到本地 json
@@ -21,6 +21,7 @@ from mdbq.aggregation import mysql_types
21
21
  warnings.filterwarnings('ignore')
22
22
  """
23
23
  建表规范:
24
+ 尽可能手动建表,再上传数据
24
25
  1. 先建 json 表,再批量上传数据;(非常重要)
25
26
  在初创数据表时, 如果有不同类报表,新版和旧版都要取一个文件,先创建数据表,再导其他数据;
26
27
  例如有的报表转化率是0%,数据类型会被识别为2位小数: decimal(10, 2),正常值应类似 0.43%,应保留4个小数, 创建类型为 decimal(10, 4)
@@ -132,6 +133,7 @@ class MysqlUpload:
132
133
  print(f'{self.filename}: {e}')
133
134
  connection.commit() # 提交事务
134
135
 
136
+ # print(cl, db_n, tb_n)
135
137
  # 返回这些结果的目的是等添加完列再写 json 文件才能读到 types 信息
136
138
  if cl and db_n and tb_n:
137
139
  mysql_types.mysql_all_dtypes(db_name=db_name, table_name=table_name) # 更新一个表的 dtypes
@@ -208,6 +210,7 @@ class MysqlUpload:
208
210
  col_not_exist = cols
209
211
  # 对文件不存在的列信息进行数据类型转换(按指定规则)
210
212
  dtypes.update({col: self.convert_dtype_to_sql(df=df, col=col, dtype=df[col].dtype) for col in col_not_exist})
213
+ # print(dtypes)
211
214
  # 至此 df 中全部列类型已经转换完成
212
215
  # 返回结果, 示例: {'上市年份': 'mediumtext', '商品id': 'mediumtext', '平台': 'mediumtext'}
213
216
  return dtypes, cl, db_n, tb_n # 返回这些结果的目的是等添加完列再写 json 文件才能读到 types 信息
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: mdbq
3
- Version: 1.2.1
3
+ Version: 1.2.3
4
4
  Home-page: https://pypi.org/project/mdbsql
5
5
  Author: xigua,
6
6
  Author-email: 2587125111@qq.com
@@ -3,7 +3,7 @@
3
3
  from setuptools import setup, find_packages
4
4
 
5
5
  setup(name='mdbq',
6
- version='1.2.1',
6
+ version='1.2.3',
7
7
  author='xigua, ',
8
8
  author_email="2587125111@qq.com",
9
9
  url='https://pypi.org/project/mdbsql',
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes