mdbq 3.7.17__py3-none-any.whl → 3.7.19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
mdbq/aggregation/query_data.py CHANGED
@@ -2,16 +2,13 @@
  import re
  from mdbq.mysql import mysql
  from mdbq.mysql import s_query
- from mdbq.aggregation import optimize_data
- from mdbq.config import products
- from mdbq.config import default
+ from mdbq.config import config
  import datetime
  from dateutil.relativedelta import relativedelta
  import pandas as pd
  import numpy as np
  from functools import wraps
  import platform
- import getpass
  import json
  import os
  import time
@@ -24,9 +21,11 @@ import sys
  """

  """
- dirname = os.path.join(os.path.realpath(os.path.dirname(sys.argv[0])), 'support')
- targe_host, hostname, local = default.return_default_host()
- m_engine, username, password, host, port = default.get_mysql_engine(platform='Windows', hostname=hostname, sql='mysql', local=local, config_file=None)
+
+ content = config.read_config(file_path=os.path.join(os.path.realpath(os.path.dirname(sys.argv[0])), 'spd.cnf'))
+ username, password, host, port = content['username'], content['password'], content['host'], content['port']
+ m_engine = mysql.MysqlUpload(username=username, password=password, host=host, port=port, charset='utf8mb4')
+
  logger = logging.getLogger(__name__)


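The default.return_default_host / default.get_mysql_engine bootstrap is replaced here by a plain config-file read. A minimal sketch of the new startup path (the spd.cnf name, its location next to the entry script, and the four keys come from the hunk above; the rest is illustrative):

import os
import sys
from mdbq.config import config
from mdbq.mysql import mysql

# spd.cnf is looked up in the same directory as the entry script, per the hunk above
cnf_path = os.path.join(os.path.realpath(os.path.dirname(sys.argv[0])), 'spd.cnf')
content = config.read_config(file_path=cnf_path)  # dict of key/value strings (None if the file is missing)
username, password, host, port = content['username'], content['password'], content['host'], content['port']
m_engine = mysql.MysqlUpload(username=username, password=password, host=host, port=port, charset='utf8mb4')

Note that read_config returns every value as a string, so port arrives exactly as written in the file (for example '3306'), not as an int.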
@@ -42,8 +41,6 @@ class MysqlDatasQuery:
  self.update_service = True # 调试时加,true: 将数据写入 mysql 服务器
  self.pf_datas = []
  self.pf_datas_jd = [] # 京东聚合销售表
- # self.output = set_support.SetSupport(dirname='support')
- self.output = dirname

  @staticmethod
  def try_except(func): # 在类内部定义一个异常处理方法
@@ -1454,9 +1451,13 @@ class MysqlDatasQuery:
  end_date=end_date,
  projection=projection,
  )
- df.drop_duplicates(
- subset=['日期', '店铺名称', '商品id', '商品访客数'], keep='last',
- inplace=True, ignore_index=True)
+ # df.drop_duplicates(
+ # subset=['日期', '店铺名称', '商品id', '商品访客数'], keep='last',
+ # inplace=True, ignore_index=True)
+ # 保留最新日期的数据
+ idx = df.groupby(['日期', '店铺名称', '商品id'])['更新时间'].idxmax()
+ df = df.loc[idx]
+
  df_set['商品id'] = df_set['商品id'].astype('int64')
  df_set = df_set[['商品id', '上市年份']]
  df['商品id'] = df['商品id'].astype('int64')
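The replaced drop_duplicates call kept whichever duplicate row happened to come last; the new logic keys the choice on 更新时间 instead. A toy illustration of the groupby/idxmax pattern (the data is made up for the example):

import pandas as pd

df = pd.DataFrame({
    '日期': ['2025-01-01', '2025-01-01', '2025-01-02'],
    '店铺名称': ['店A', '店A', '店A'],
    '商品id': [1001, 1001, 1001],
    '商品访客数': [10, 12, 9],
    '更新时间': pd.to_datetime(['2025-01-02 08:00', '2025-01-02 09:30', '2025-01-03 08:00']),
})
# index label of the newest 更新时间 within each (日期, 店铺名称, 商品id) group
idx = df.groupby(['日期', '店铺名称', '商品id'])['更新时间'].idxmax()
df = df.loc[idx]  # keeps the 12-visitor row for 2025-01-01 plus the single 2025-01-02 row

Unlike the old call, the selection no longer depends on the physical order of the rows, and the result keeps its original index labels (there is no ignore_index reset).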
@@ -3180,10 +3181,6 @@ class MysqlDatasQuery:
  ],
  },
  ]
- if as_file:
- with open(os.path.join(self.output, f'分类配置.json'), 'w') as f:
- json.dump(datas, f, ensure_ascii=False, sort_keys=False, indent=4)
- breakpoint()
  result = ''
  res = []
  is_continue = False
@@ -3319,10 +3316,6 @@ class MysqlDatasQuery:
  ]
  },
  ]
- if as_file:
- with open(os.path.join(self.output, f'分类配置_推广人群分类_函数内置规则.json'), 'w') as f:
- json.dump(datas, f, ensure_ascii=False, sort_keys=False, indent=4)
- breakpoint()
  result = ''
  res = []
  is_continue = False
@@ -3784,6 +3777,15 @@ def query3(months=1, less_dict=[]):
  sdq.spph(db_name='聚合数据', table_name='天猫_商品排行')


+ def op_data(db_name_lists, days: int = 63, is_mongo=True, is_mysql=True):
+ # Mysql
+ if is_mysql:
+ s = mysql.OptimizeDatas(username=username, password=password, host=host, port=port)
+ s.db_name_lists = db_name_lists
+ s.days = days
+ s.optimize_list()
+
+
  def main(days=150, months=3):
  """
  days: 清理聚合数据的日期长度,days 最好大于 3 * (months +1)
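With optimize_data.op_data gone (the old module survives only as optimize_data_bak.py, per the RECORD further down), query_data.py now ships this small wrapper itself. A call like the one in main() unrolls roughly as follows; the OptimizeDatas attribute and method names are exactly those in the added lines above, everything else is a sketch:

# op_data(db_name_lists=['聚合数据'], days=63, is_mongo=False, is_mysql=True)
# is roughly equivalent to:
s = mysql.OptimizeDatas(username=username, password=password, host=host, port=port)
s.db_name_lists = ['聚合数据']   # databases whose tables get cleaned up
s.days = 63                      # how far back the cleanup reaches
s.optimize_list()

The is_mongo flag is still accepted so existing call sites keep working, but it is now ignored: only the MySQL branch remains.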
@@ -3791,8 +3793,6 @@ def main(days=150, months=3):
  """
  # 1. 更新日期表 更新货品年份基准表, 属性设置 3 - 货品年份基准
  date_table()
- # p = products.Products()
- # p.to_mysql()

  # 清理非聚合数据库
  db_list = [
@@ -3816,7 +3816,7 @@
  for step in range(len(db_list)):
  future_to_function = {
  executor.submit(
- optimize_data.op_data,
+ op_data,
  days=31,
  is_mongo=False,
  is_mysql=True,
@@ -3844,7 +3844,7 @@
  time.sleep(10)

  # 3. 清理聚合数据
- optimize_data.op_data(
+ op_data(
  db_name_lists=['聚合数据'],
  days=days, # 清理聚合数据的日期长度
  is_mongo=False,
@@ -3852,13 +3852,17 @@
  )


+ def test():
+ sdq = MysqlDatasQuery() # 实例化数据处理类
+ sdq.months = 1 # 设置数据周期, 1 表示近 2 个月
+ sdq.update_service = True # 调试时加,true: 将数据写入 mysql 服务器
+ sdq.spph(db_name='聚合数据', table_name='天猫_商品排行')
+
  if __name__ == '__main__':
  main(
  days=150, # 清理聚合数据的日期长度
  months=3 # 生成聚合数据的长度
  )

- # sdq = MysqlDatasQuery() # 实例化数据处理类
- # sdq.months = 20 # 设置数据周期, 1 表示近 2 个月
- # sdq.tg_by_day(db_name='聚合数据', table_name='多店推广场景_按日聚合')
+

mdbq/config/config.py ADDED
@@ -0,0 +1,95 @@
+ import os.path
+ import re
+
+
+ def read_config(file_path):
+ """读取配置文件,返回字典"""
+ if not os.path.isfile(file_path):
+ print(f'配置文件不存在: {file_path}')
+ return
+ config = {}
+ with open(file_path, 'r') as file:
+ for line in file:
+ stripped_line = line.strip()
+ # 跳过空行和注释行(以 # 或 // 开头)
+ if not stripped_line or stripped_line.startswith(('#', '//')):
+ continue
+ # 处理行内注释(# 或 // 前有空格)
+ comment_match = re.search(r'\s+[#//]', line)
+ if comment_match:
+ line = line[:comment_match.start()].strip()
+ else:
+ line = line.strip()
+ # 解析键值对
+ if '=' in line:
+ key, value = line.split('=', 1)
+ config[key.strip()] = value.strip()
+ return config
+
+
+ def write_config(file_path, rewrite):
+ """
+ 更新配置文件中的键值对,保留注释和其他内容,修复等号空格问题
+ 示例:
+ write_config('spd.cnf', {'is_spider': True})
+ """
+ # 读取所有行到内存
+ try:
+ with open(file_path, 'r') as file:
+ lines = file.readlines()
+ except FileNotFoundError:
+ with open(file_path, 'w') as file:
+ lines = []
+
+ new_lines = []
+ found_keys = set()
+
+ for line in lines:
+ stripped = line.strip()
+ if not stripped or stripped.startswith(('#', '//')):
+ new_lines.append(line)
+ continue
+
+ # 使用 partition 保留等号格式
+ key_part, sep, value_part = line.partition('=')
+ if not sep: # 没有等号的行直接保留
+ new_lines.append(line)
+ continue
+
+ key = key_part.strip()
+ if key in rewrite:
+ # 处理值部分和注释
+ comment_match = re.search(r'\s+([#//].*)$', value_part)
+ if comment_match:
+ comment = comment_match.group(0)
+ raw_value = value_part[:comment_match.start()].rstrip()
+ else:
+ comment = ''
+ raw_value = value_part.strip()
+
+ # 保留原值前导空格
+ leading_space = re.match(r'^(\s*)', value_part).group(1)
+ new_value = f"{leading_space}{rewrite[key]}{comment}"
+
+ # 构建新行(保留原等号格式)
+ new_line = f"{key_part}{sep}{new_value}\n"
+ new_lines.append(new_line)
+ found_keys.add(key)
+ else:
+ new_lines.append(line)
+
+ # 添加新键值对
+ for key in rewrite:
+ if key not in found_keys:
+ new_lines.append(f"{key} = {rewrite[key]}\n")
+
+ # 写入文件
+ with open(file_path, 'w') as file:
+ file.writelines(new_lines)
+
+
+ if __name__ == '__main__':
+ res = read_config('/Users/xigua/数据中心2/spider/spd.cnf')
+ print(res)
+ # write_config('spd.cnf', {'is_spider': False})
+
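The new helpers expect a plain "key = value" file with #- or //-style comments. A round-trip sketch (the demo.cnf contents are illustrative, not the real spd.cnf):

from mdbq.config import config

with open('demo.cnf', 'w') as f:
    f.write('# MySQL connection\n'
            'username = root\n'
            'password = secret  # inline comments after whitespace are stripped\n'
            'host = 127.0.0.1\n'
            'port = 3306\n')

print(config.read_config('demo.cnf'))
# -> {'username': 'root', 'password': 'secret', 'host': '127.0.0.1', 'port': '3306'}

config.write_config('demo.cnf', {'port': 3307, 'is_spider': False})
# rewrites the port line in place and appends "is_spider = False"

Two things to keep in mind: every value comes back as a string, and because the inline-comment pattern treats whitespace followed by '/' as a comment start, a value such as "path = /data" would be truncated by read_config.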
@@ -25,13 +25,13 @@ class Products:
  def update_my_datas(self):
  my_datas = [
  {
- '平台': '天猫', '商品id': '848929365673', '上市年份': '2024年11月'
+ '平台': '天猫', '商品id': '848929365000', '上市年份': '2024年11月'
  },
  {
- '平台': '天猫', '商品id': '840499705810', '上市年份': '2024年10月'
+ '平台': '天猫', '商品id': '840499705000', '上市年份': '2024年10月'
  },
  {
- '平台': '天猫', '商品id': '830789689032', '上市年份': '2024年9月'
+ '平台': '天猫', '商品id': '830789680000', '上市年份': '2024年9月'
  },
  {
  '平台': '天猫', '商品id': '822020840000', '上市年份': '2024年8月'
@@ -49,13 +49,13 @@ class Products:
  '平台': '天猫', '商品id': '778971448000', '上市年份': '2024年4月'
  },
  {
- '平台': '天猫', '商品id': '770576016820', '上市年份': '2024年3月'
+ '平台': '天猫', '商品id': '770576016800', '上市年份': '2024年3月'
  },
  {
  '平台': '天猫', '商品id': '766115058400', '上市年份': '2024年2月'
  },
  {
- '平台': '天猫', '商品id': '759478591187', '上市年份': '2024年1月'
+ '平台': '天猫', '商品id': '759478591100', '上市年份': '2024年1月'
  },
  {
  '平台': '天猫', '商品id': '752770183000', '上市年份': '2023年12月'
@@ -73,22 +73,22 @@ class Products:
  '平台': '天猫', '商品id': '730800000000', '上市年份': '2023年8月'
  },
  {
- '平台': '天猫', '商品id': '726939636835', '上市年份': '2023年7月'
+ '平台': '天猫', '商品id': '726939636000', '上市年份': '2023年7月'
  },
  {
- '平台': '天猫', '商品id': '721366048631', '上市年份': '2023年6月'
+ '平台': '天猫', '商品id': '721366048000', '上市年份': '2023年6月'
  },
  {
- '平台': '天猫', '商品id': '716130443004', '上市年份': '2023年5月'
+ '平台': '天猫', '商品id': '716130443000', '上市年份': '2023年5月'
  },
  {
- '平台': '天猫', '商品id': '709824308589', '上市年份': '2023年4月'
+ '平台': '天猫', '商品id': '709824308000', '上市年份': '2023年4月'
  },
  {
- '平台': '天猫', '商品id': '705440027804', '上市年份': '2023年3月'
+ '平台': '天猫', '商品id': '705440027000', '上市年份': '2023年3月'
  },
  {
- '平台': '天猫', '商品id': '701096067973', '上市年份': '2023年2月'
+ '平台': '天猫', '商品id': '701096067900', '上市年份': '2023年2月'
  },
  {
  '平台': '天猫', '商品id': '696017000000', '上市年份': '2023年1月'
mdbq/mysql/mysql.py CHANGED
@@ -1,7 +1,6 @@
  # -*- coding:utf-8 -*-
  import datetime
  import platform
- import getpass
  import re
  import time
  from functools import wraps
@@ -12,20 +11,12 @@ import pandas as pd
  from sqlalchemy import create_engine
  import os
  import calendar
- from mdbq.config import myconfig
- import traceback
  import logging

  warnings.filterwarnings('ignore')
  """
  建表流程:
-
-
  建表规范:
- 1. 数据库和数据表名如果有字母,必须使用小写,大写在建库后会自动变小写,再次上传数据会找不到数据库(macos和linux都有这种情况)
- 2. 无论是数据库/表/列名还是值,尽量避免特殊字符或者表情符号,数据库/表/列名尽量都使用 `列名` 转义,避免错误
- 3. 小数必须使用 decimal, 禁止 float 和 double, 因为计算精度差异,后续需要聚合数据时会引发很多问题
-
  """
  logger = logging.getLogger(__name__)

@@ -1738,7 +1729,4 @@ def year_month_day_bak(start_date, end_date):


  if __name__ == '__main__':
- conf = myconfig.main()
- data = conf['Windows']['xigua_lx']['mysql']['local']
- username, password, host, port = data['username'], data['password'], data['host'], data['port']
- logger.info(username, password, host, port)
+ pass
mdbq/mysql/s_query.py CHANGED
@@ -11,7 +11,6 @@ import pandas as pd
  from sqlalchemy import create_engine
  import os
  import calendar
- from mdbq.dataframe import converter
  from decimal import Decimal
  import logging

mdbq/other/download_sku_picture.py CHANGED
@@ -11,24 +11,21 @@ import time
  import warnings
  import pandas as pd
  from lxml import etree
- from rich.pretty import pretty_repr
  from selenium import webdriver
  from selenium.webdriver.support.wait import WebDriverWait
  from selenium.webdriver.common.by import By
  from selenium.webdriver.support import expected_conditions as EC
  from selenium.webdriver.chrome.service import Service
  from mdbq.config import set_support
- from mdbq.config import default
+ from mdbq.config import config
  from mdbq.mysql import mysql
  from mdbq.mysql import s_query
  from mdbq.other import ua_sj
  import requests
-
  import io
  from openpyxl import load_workbook
  from openpyxl.drawing.image import Image
  from openpyxl.utils import get_column_letter
- from setuptools.sandbox import save_path

  warnings.filterwarnings('ignore')
  """
@@ -49,8 +46,10 @@ upload_path = os.path.join(D_PATH, '数据上传中心') # 此目录位于下
  if not os.path.exists(upload_path): # 数据中心根目录
  os.makedirs(upload_path)

- targe_host, hostname, local = default.return_default_host()
- m_engine, username, password, host, port = default.get_mysql_engine(platform='Windows', hostname=hostname, sql='mysql', local=local, config_file=None)
+ content = config.read_config(file_path=os.path.join(os.path.realpath(os.path.dirname(sys.argv[0])), 'spd.cnf'))
+ username, password, host, port = content['username'], content['password'], content['host'], content['port']
+ m_engine = mysql.MysqlUpload(username=username, password=password, host=host, port=port, charset='utf8mb4')
+
  if not username:
  print(f'找不到主机:')

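One behavioural note on this hunk: read_config returns None when spd.cnf is absent, so the subscripting on the content line would raise before the existing "if not username" guard ever runs. A more defensive variant might look like this (a sketch, not what the module currently does):

content = config.read_config(file_path=os.path.join(os.path.realpath(os.path.dirname(sys.argv[0])), 'spd.cnf'))
if not content:
    # missing or empty config file: fall through to the existing guard below
    username = password = host = port = None
    m_engine = None
else:
    username, password, host, port = content['username'], content['password'], content['host'], content['port']
    m_engine = mysql.MysqlUpload(username=username, password=password, host=host, port=port, charset='utf8mb4')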
mdbq/redis/getredis.py CHANGED
@@ -3,8 +3,6 @@ import os
  import sys
  import random
  import redis
- from mdbq.mysql import s_query
- from mdbq.config import default
  import pandas as pd
  import numpy as np
  import json
@@ -15,9 +13,6 @@ from logging.handlers import RotatingFileHandler
  from decimal import Decimal
  import orjson

- targe_host, hostname, local = default.return_default_host()
- m_engine, username, password, host, port = default.get_mysql_engine(platform='Windows', hostname=hostname, sql='mysql', local=local, config_file=None)
-
  # 获取当前模块的日志记录器
  logger = logging.getLogger(__name__)

@@ -603,34 +598,4 @@ class RedisDataHash(object):


  if __name__ == '__main__':
- # # ****************************************************
- # # 这一部分在外部定义,只需要定义一次,开始
- # redis_config = {
- # 'host': '127.0.0.1',
- # 'port': 6379, # 默认Redis端口
- # 'db': 0, # 默认Redis数据库索引
- # # 'username': 'default',
- # 'password': redis_password,
- # }
- # # redis 实例化
- # r = redis.Redis(**redis_config)
- # # mysql 实例化
- # d = s_query.QueryDatas(username=username, password=password, host=host, port=port)
- # # 将两个库的实例化对传给 RedisData 类,并实例化数据处理引擎
- # m = RedisData(redis_engin=r, download=d)
- # # ****************************************************
- #
- # # 以下为动态获取数据库数据
- # db_name = '聚合数据'
- # table_name = '多店推广场景_按日聚合'
- # set_year = False
- # df = m.get_from_redis(
- # db_name=db_name,
- # table_name=table_name,
- # set_year=set_year,
- # start_date='2025-01-01',
- # end_date='2025-01-31'
- # )
- # logger.info(df)
- #
  pass
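getredis.py no longer builds a module-level MySQL engine, so the wiring shown in the deleted demo now has to live with the caller. A sketch of reproducing it externally, combining the removed demo with the new config module (it assumes the RedisData class referenced by that demo is still available; the Redis credentials and table names are placeholders):

import os
import sys
import redis
from mdbq.config import config
from mdbq.mysql import s_query
from mdbq.redis import getredis

content = config.read_config(file_path=os.path.join(os.path.realpath(os.path.dirname(sys.argv[0])), 'spd.cnf'))
username, password, host, port = content['username'], content['password'], content['host'], content['port']

r = redis.Redis(host='127.0.0.1', port=6379, db=0, password='...')  # placeholder Redis credentials
d = s_query.QueryDatas(username=username, password=password, host=host, port=port)
m = getredis.RedisData(redis_engin=r, download=d)  # 'redis_engin' spelling as in the removed demo
df = m.get_from_redis(db_name='聚合数据', table_name='多店推广场景_按日聚合',
                      set_year=False, start_date='2025-01-01', end_date='2025-01-31')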
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: mdbq
- Version: 3.7.17
+ Version: 3.7.19
  Home-page: https://pypi.org/project/mdbq
  Author: xigua,
  Author-email: 2587125111@qq.com
@@ -1,14 +1,15 @@
  mdbq/__init__.py,sha256=Il5Q9ATdX8yXqVxtP_nYqUhExzxPC_qk_WXQ_4h0exg,16
  mdbq/__version__.py,sha256=y9Mp_8x0BCZSHsdLT_q5tX9wZwd5QgqrSIENLrb6vXA,62
  mdbq/aggregation/__init__.py,sha256=EeDqX2Aml6SPx8363J-v1lz0EcZtgwIBYyCJV6CcEDU,40
- mdbq/aggregation/optimize_data.py,sha256=wB7prQdZAHyjzXH9V8g8X_JBMdvCCUITN1hVwK72Tdg,952
- mdbq/aggregation/query_data.py,sha256=I2esDWtp_dg49bqvD-qIZIpF9Z2kyo8My_oWuo6RcC4,174468
+ mdbq/aggregation/optimize_data_bak.py,sha256=wB7prQdZAHyjzXH9V8g8X_JBMdvCCUITN1hVwK72Tdg,952
+ mdbq/aggregation/query_data.py,sha256=qA-JsYwbK7aRDdwUATPBTbKdR_64jsB20edr8MMGvPk,174306
  mdbq/bdup/__init__.py,sha256=AkhsGk81SkG1c8FqDH5tRq-8MZmFobVbN60DTyukYTY,28
  mdbq/bdup/bdup.py,sha256=hJs815hGFwm_X5bP2i9XugG2w2ZY_F0n3-Q0hVpIPPw,4892
  mdbq/config/__init__.py,sha256=jso1oHcy6cJEfa7udS_9uO5X6kZLoPBF8l3wCYmr5dM,18
- mdbq/config/default.py,sha256=pOBmlxmGzmK8XYstv_qwE4vDwJmytEmVIYE0bDnCXOs,2143
- mdbq/config/myconfig.py,sha256=5gg3B8fnDx2_a5I3kfPLXx7xePqZwfb3ohj6iMVhplc,1022
- mdbq/config/products.py,sha256=02sZAhFyF5dpapXj9h5iOKMXvC4ZNZjHTnQZgbzc9NA,5731
+ mdbq/config/config.py,sha256=TdSGY8-lS-dJiH51mi3QAPRs6ZvvW1Q4zNiZYwrHiiY,3098
+ mdbq/config/default_bak.py,sha256=pOBmlxmGzmK8XYstv_qwE4vDwJmytEmVIYE0bDnCXOs,2143
+ mdbq/config/myconfig_bak.py,sha256=5gg3B8fnDx2_a5I3kfPLXx7xePqZwfb3ohj6iMVhplc,1022
+ mdbq/config/products_bak.py,sha256=kWN3NWp9uT6D0M4Vw33yqOo9Tb98hEO8-X-llRjoY8M,5731
  mdbq/config/set_support.py,sha256=7C7NFy7Em_uC7lig54qQlIlKG_AJeMCskxzK87anGkM,462
  mdbq/dataframe/__init__.py,sha256=2HtCN8AdRj53teXDqzysC1h8aPL-mMFy561ESmhehGQ,22
  mdbq/dataframe/converter.py,sha256=lETYhT7KXlWzWwqguqhk6vI6kj4rnOBEW1lhqKy2Abc,5035
@@ -17,11 +18,11 @@ mdbq/log/mylogger.py,sha256=oaT7Bp-Hb9jZt52seP3ISUuxVcI19s4UiqTeouScBO0,3258
  mdbq/log/spider_logging.py,sha256=59xe4Ckb7m-sBt3GYk8DC_hQg7-jnjBRq1o718r1Ry8,1676
  mdbq/mongo/__init__.py,sha256=SILt7xMtQIQl_m-ik9WLtJSXIVf424iYgCfE_tnQFbw,13
  mdbq/mysql/__init__.py,sha256=A_DPJyAoEvTSFojiI2e94zP0FKtCkkwKP1kYUCSyQzo,11
- mdbq/mysql/mysql.py,sha256=bsv-khT7fyoYxEJMbJPks-V1tYvwX-mNHsoTNXfWiKk,95884
- mdbq/mysql/s_query.py,sha256=pj5ioJfUT81Su9S-km9G49gF5F2MmXXfw_oAIUzhN28,8794
+ mdbq/mysql/mysql.py,sha256=UoZPVyrgDp5L8-i0jVptkal9G64oNrdhNwa-xpp8txo,95127
+ mdbq/mysql/s_query.py,sha256=09Dp7DrVXui6dAI6zFDfrsUOdjPblF_oYUpgqbZMhXg,8757
  mdbq/mysql/year_month_day.py,sha256=VgewoE2pJxK7ErjfviL_SMTN77ki8GVbTUcao3vFUCE,1523
  mdbq/other/__init__.py,sha256=jso1oHcy6cJEfa7udS_9uO5X6kZLoPBF8l3wCYmr5dM,18
- mdbq/other/download_sku_picture.py,sha256=Da_5hsw3yeDm2lXE-YLMbIxk2-36K5-o7D3Sm9APj9M,44760
+ mdbq/other/download_sku_picture.py,sha256=Ttge8kX0nz63yX8351Pz49fEXTKPhScFKjOyv1SSRfY,44798
  mdbq/other/porxy.py,sha256=UHfgEyXugogvXgsG68a7QouUCKaohTKKkI4RN-kYSdQ,4961
  mdbq/other/pov_city.py,sha256=AEOmCOzOwyjHi9LLZWPKi6DUuSC-_M163664I52u9qw,21050
  mdbq/other/ua_sj.py,sha256=JuVYzc_5QZ9s_oQSrTHVKkQv4S_7-CWx4oIKOARn_9U,22178
@@ -30,10 +31,10 @@ mdbq/pbix/pbix_refresh.py,sha256=JUjKW3bNEyoMVfVfo77UhguvS5AWkixvVhDbw4_MHco,239
  mdbq/pbix/refresh_all.py,sha256=OBT9EewSZ0aRS9vL_FflVn74d4l2G00wzHiikCC4TC0,5926
  mdbq/pbix/refresh_all_old.py,sha256=_pq3WSQ728GPtEG5pfsZI2uTJhU8D6ra-htIk1JXYzw,7192
  mdbq/redis/__init__.py,sha256=YtgBlVSMDphtpwYX248wGge1x-Ex_mMufz4-8W0XRmA,12
- mdbq/redis/getredis.py,sha256=MLVWENvFtLFpxUlkUMBvc5bXPXeG-QIl8f_A200TIWE,25497
+ mdbq/redis/getredis.py,sha256=1pTga2iINx0NV2ffl0D-aspZhrZMDQR8SpohAv1acoo,24076
  mdbq/spider/__init__.py,sha256=RBMFXGy_jd1HXZhngB2T2XTvJqki8P_Fr-pBcwijnew,18
  mdbq/spider/aikucun.py,sha256=o_QwFWbD6O2F56k6bwnpVV55EcdFCyes05ON7iu9TrA,21882
- mdbq-3.7.17.dist-info/METADATA,sha256=QcNNflNFDMslBNGYbl047wqJGPBu7Ohd_WiowCL7xCk,244
- mdbq-3.7.17.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
- mdbq-3.7.17.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
- mdbq-3.7.17.dist-info/RECORD,,
+ mdbq-3.7.19.dist-info/METADATA,sha256=R-8wefH_irfcE7wlkHhT9dahjDVCG7BzrXwimDO1YO0,244
+ mdbq-3.7.19.dist-info/WHEEL,sha256=cpQTJ5IWu9CdaPViMhC9YzF8gZuS5-vlfoFihTBC86A,91
+ mdbq-3.7.19.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
+ mdbq-3.7.19.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: bdist_wheel (0.44.0)
+ Generator: setuptools (70.1.0)
  Root-Is-Purelib: true
  Tag: py3-none-any
