mdbq 0.2.7.tar.gz → 0.2.9.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {mdbq-0.2.7 → mdbq-0.2.9}/PKG-INFO +1 -1
- mdbq-0.2.9/mdbq/aggregation/optimize_data.py +22 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/aggregation/query_data.py +4 -1
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/company/copysh.py +33 -2
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/mysql/data_types.py +9 -9
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq.egg-info/PKG-INFO +1 -1
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq.egg-info/SOURCES.txt +1 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/setup.py +1 -1
- {mdbq-0.2.7 → mdbq-0.2.9}/README.txt +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/__init__.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/__version__.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/aggregation/__init__.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/aggregation/aggregation.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/bdup/__init__.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/bdup/bdup.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/clean/__init__.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/clean/data_clean.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/company/__init__.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/config/__init__.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/config/get_myconf.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/config/products.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/config/update_conf.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/dataframe/__init__.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/dataframe/converter.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/log/__init__.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/log/mylogger.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/mongo/__init__.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/mongo/mongo.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/mysql/__init__.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/mysql/mysql.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/mysql/s_query.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/mysql/year_month_day.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/other/__init__.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/other/porxy.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/other/pov_city.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/other/ua_sj.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/pbix/__init__.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/pbix/pbix_refresh.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/pbix/refresh_all.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq/spider/__init__.py +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq.egg-info/dependency_links.txt +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/mdbq.egg-info/top_level.txt +0 -0
- {mdbq-0.2.7 → mdbq-0.2.9}/setup.cfg +0 -0
mdbq-0.2.9/mdbq/aggregation/optimize_data.py
@@ -0,0 +1,22 @@
+# -*- coding: UTF-8 –*-
+from mdbq.mysql import mysql
+from mdbq.config import get_myconf
+"""
+对指定数据库所有冗余数据进行清理
+"""
+
+
+def op_data(service_databases, days: int = 63):
+    for service_database in service_databases:
+        for service_name, database in service_database.items():
+            username, password, host, port = get_myconf.select_config_values(target_service=service_name, database=database)
+            s = mysql.OptimizeDatas(username=username, password=password, host=host, port=port)
+            s.db_name_lists = [
+                '聚合数据',
+            ]
+            s.days = days
+            s.optimize_list()
+
+
+if __name__ == '__main__':
+    op_data(service_databases=[{'home_lx': 'mysql'}], days=3650)
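A minimal usage sketch of the new module above, not part of the diff: it reuses the op_data signature shown in the hunk and the {'company': 'mysql'} service mapping that appears elsewhere in this release, and it assumes get_myconf already holds credentials for that service.

from mdbq.aggregation import optimize_data

# Prune redundant rows from the 聚合数据 database, keeping roughly the last ten years of data.
optimize_data.op_data(service_databases=[{'company': 'mysql'}], days=3650)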
{mdbq-0.2.7 → mdbq-0.2.9}/mdbq/aggregation/query_data.py
@@ -2,6 +2,7 @@
 from mdbq.mongo import mongo
 from mdbq.mysql import mysql
 from mdbq.mysql import s_query
+from mdbq.aggregation import optimize_data
 from mdbq.config import get_myconf
 import datetime
 from dateutil.relativedelta import relativedelta
@@ -465,7 +466,7 @@ def data_aggregation(service_databases=[{}]):
     for service_database in service_databases:
         for service_name, database in service_database.items():
             sdq = MysqlDatasQuery(target_service=service_name)  # 实例化数据处理类
-            sdq.months =
+            sdq.months = 1  # 设置数据周期, 1 表示近 2 个月
             g = GroupBy()  # 实例化数据聚合类
             # 实例化数据库连接
             username, password, host, port = get_myconf.select_config_values(target_service=service_name, database=database)
@@ -511,6 +512,8 @@ def data_aggregation(service_databases=[{}]):
             res = g.performance()  # 盈亏表,依赖其他表,单独做
             m.df_to_mysql(df=res, db_name='聚合数据', tabel_name='销售盈亏')
 
+    optimize_data.op_data(service_databases=service_databases, days=3650)  # 立即启动对聚合数据的清理工作
+
 
 if __name__ == '__main__':
     data_aggregation(service_databases=[{'company': 'mysql'}])
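With the hunks above, data_aggregation now finishes by pruning the 聚合数据 database it has just written. A minimal invocation sketch, not part of the diff, mirroring the call that copysh.py makes later in this release:

from mdbq.aggregation import query_data

# Aggregate into 聚合数据, then immediately clean it via optimize_data.op_data (per the change above).
query_data.data_aggregation(service_databases=[{'company': 'mysql'}])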
{mdbq-0.2.7 → mdbq-0.2.9}/mdbq/company/copysh.py
@@ -9,11 +9,14 @@ import datetime
 import shutil
 import time
 import re
-
+import socket
 from dateutil.utils import today
 from mdbq.bdup import bdup
 from mdbq.aggregation import aggregation
+from mdbq.aggregation import query_data
+from mdbq.aggregation import optimize_data
 from mdbq.config import update_conf
+from mdbq.config import get_myconf
 warnings.filterwarnings('ignore')
 
 
@@ -293,6 +296,33 @@ class UpdateMysql:
         return False, self.d_path
 
 
+def op_data(days: int =3650):
+
+    # 清理数据库, 除了 聚合数据
+    if socket.gethostname() == 'company':  # 公司台式机自身运行
+        # Mysql
+        username, password, host, port = get_myconf.select_config_values(
+            target_service='company',
+            database='mysql',
+        )
+        s = mysql.OptimizeDatas(username=username, password=password, host=host, port=port)
+        s.db_name_lists = [
+            '京东数据2',
+            '天猫数据2',
+            '市场数据2',
+            '生意参谋数据2',
+            '生意经2',
+            '属性设置2',
+            # '聚合数据',
+        ]
+        s.days = days
+        s.optimize_list()
+
+        # 聚合数据,并清理聚合数据
+        query_data.data_aggregation(service_databases=[{'company': 'mysql'}])
+
+
+
 def main():
     t = TbFiles()
     u = UpdateMysql()
@@ -308,10 +338,11 @@ def main():
     dp.cleaning(is_move=True)  # 公司台式机需要移除
     dp.upload_df(service_databases=[{'company': 'mysql'}])
 
-    #
+    # 此操作用于修改 .copysh_conf 文件,将 ch_record 改为 false (更新完成)
    w = update_conf.UpdateConf(filename='.copysh_conf')
     w.update_config(option='ch_record', new_value='False')
     time.sleep(60)
+    op_data(days=3650)  # 数据清理和聚合
 
     t.sleep_minutes = 30  # 同步前休眠时间
     t.tb_file()
{mdbq-0.2.7 → mdbq-0.2.9}/mdbq/mysql/data_types.py
@@ -155,13 +155,13 @@ class DataTypes:
                 if collection_name in list(self.datas[cl][db_name].keys()):
                     return self.datas[cl][db_name][collection_name]
                 else:
-                    print(f'不存在的集合名信息: {collection_name}')
+                    print(f'不存在的集合名信息: {collection_name}, 文件位置: {json_file}')
                     return {}
             else:
-                print(f'不存在的数据库信息: {db_name}')
+                print(f'不存在的数据库信息: {db_name}, 文件位置: {json_file}')
                 return {}
         else:
-            print(f'不存在的数据分类: {cl}')
+            print(f'不存在的数据分类: {cl}, 文件位置: {json_file}')
             return {}
 
 
@@ -252,10 +252,10 @@ def main():
 
 if __name__ == '__main__':
     # main()
-
+    mysql_all_dtypes()  # 更新 mysql 中所有数据库的 dtypes 信息到本地 json
 
-    path = '/Users/xigua/数据中心/自动0备份/py/数据更新/support'
-    d = DataTypes()
-    # 从本地文件中读取 dtype 信息
-    dtypes = d.load_dtypes(cl='mysql', db_name='生意经2', collection_name='店铺指标', path=path)
-    print(dtypes)
+    # path = '/Users/xigua/数据中心/自动0备份/py/数据更新/support'
+    # d = DataTypes()
+    # # 从本地文件中读取 dtype 信息
+    # dtypes = d.load_dtypes(cl='mysql', db_name='生意经2', collection_name='店铺指标', path=path)
+    # print(dtypes)
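The __main__ block above now refreshes the local dtypes JSON via mysql_all_dtypes() and keeps the read-back example only as comments. A sketch of that read-back path, not part of the diff, reconstructed from the commented lines and the module's location (mdbq/mysql/data_types.py); the support directory and table names are the author's own examples and may not exist on other machines:

from mdbq.mysql import data_types

d = data_types.DataTypes()
# Read the cached dtype mapping for one table from the local support directory.
dtypes = d.load_dtypes(cl='mysql', db_name='生意经2', collection_name='店铺指标', path='/Users/xigua/数据中心/自动0备份/py/数据更新/support')
print(dtypes)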