mdbq 0.1.0__py3-none-any.whl → 0.1.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
mdbq/aggregation/aggregation.py CHANGED
@@ -20,6 +20,17 @@ import shutil
  import getpass

  warnings.filterwarnings('ignore')
+ """
+ 1. DatabaseUpdate: cleans the raw data downloaded by the crawler and loads it into the database;
+ cleaning mainly handles illegal characters in column names and pre-processes invalid values in the df;
+ when data is loaded, the dtypes info in the local json file is checked and updated;
+ if the json lacks dtypes info, update it first with update_dtypte, or add it to the local json by hand;
+ 2. DataTypes: a class that stores a csv file's dtypes info into the local json file, calling converter to pre-process the df;
+ used to fill in a database's dtypes info; call it whenever that info needs updating;
+ 3. update_dtypte: a function that updates one csv file's dtypes info into the local json file;
+ 4. upload: a function that uploads a folder to the database;
+ if the local json is missing this database's dtypes info, update the json with update_dtypte before uploading;
+ """


  class DataTypes:
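
A minimal usage sketch of the pipeline this docstring describes, assuming the import path implied elsewhere in this diff (copysh.py imports `from mdbq.aggregation import aggregation`); the download path is illustrative:

    from mdbq.aggregation import aggregation

    d = aggregation.DatabaseUpdate(path='/Users/xigua/Downloads')  # folder of raw crawler files
    d.new_unzip(is_move=True)   # unpack downloaded archives
    d.cleaning(is_move=False)   # sanitize column names and invalid values
    # write the cleaned frames to MongoDB and MySQL on the 'home_lx' service
    d.upload_df(service_databases=[{'home_lx': 'mongodb'}, {'home_lx': 'mysql'}])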
@@ -886,14 +897,6 @@ class DatabaseUpdate:
          return encod


- def main():
-     d = DatabaseUpdate(path='/Users/xigua/Downloads')
-     d.new_unzip(is_move=True)
-     d.cleaning(is_move=False)
-     d.upload_df(service_databases=[{'home_lx': 'mongodb'}, {'home_lx': 'mysql'}])
-     # print(d.datas)
-
-
  def update_dtypte():
      """ Update one file's dtype info into the json file """
      file = '/Users/xigua/数据中心/原始文件2/月数据/流量来源/【生意参谋平台】无线店铺流量来源-2023-04-01_2023-04-30.csv'
@@ -910,9 +913,9 @@ def update_dtypte():

  def upload():
      """ Upload a folder to the database """
-     path = '/Users/xigua/数据中心/原始文件2/生意经/宝贝指标'
+     path = '/Users/xigua/数据中心/原始文件2/生意经/店铺指标'
      db_name = '生意经2'
-     collection_name = '宝贝指标'
+     collection_name = '店铺指标'

      username, password, host, port = get_myconf.select_config_values(
          target_service='home_lx',
@@ -935,6 +938,16 @@ def upload():
          host=host,
          port=port,
      )
+     username, password, host, port = get_myconf.select_config_values(
+         target_service='nas',
+         database='mysql',
+     )
+     nas = mysql.MysqlUpload(
+         username=username,
+         password=password,
+         host=host,
+         port=port,
+     )

      dt = DataTypes()
      dtypes = dt.load_dtypes(
@@ -967,16 +980,23 @@

              d.df_to_mongo(df=df, db_name=db_name, collection_name=collection_name)
              m.df_to_mysql(df=df, db_name=db_name, tabel_name=collection_name)
+             nas.df_to_mysql(df=df, db_name=db_name, tabel_name=collection_name)
          except Exception as e:
              print(name, e)
-             # break
-             # break
      if d.client:
          d.client.close()  # the database connection must be closed manually


+ def main():
+     d = DatabaseUpdate(path='/Users/xigua/Downloads')
+     d.new_unzip(is_move=True)
+     d.cleaning(is_move=False)
+     d.upload_df(service_databases=[{'home_lx': 'mongodb'}, {'home_lx': 'mysql'}])
+     # print(d.datas)
+
+
  if __name__ == '__main__':
-     # username, password, host, port = get_myconf.select_config_values(target_service='aliyun', database='mongodb')
+     # username, password, host, port = get_myconf.select_config_values(target_service='nas', database='mysql')
      # print(username, password, host, port)

      # main()
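
The upload() changes above add a third destination: every cleaned frame now goes to MongoDB, the home_lx MySQL, and the Synology (nas) MySQL. A sketch of the dual-MySQL write, assuming only the get_myconf and mysql APIs shown in this diff (the sample frame is illustrative):

    import pandas as pd

    from mdbq.config import get_myconf
    from mdbq.mysql import mysql

    uploaders = []
    for service in ('home_lx', 'nas'):  # one MysqlUpload per MySQL target
        username, password, host, port = get_myconf.select_config_values(
            target_service=service, database='mysql')
        uploaders.append(mysql.MysqlUpload(
            username=username, password=password, host=host, port=port))

    df = pd.DataFrame({'日期': ['2023-04-01'], '销售额': [100]})  # stand-in for a cleaned frame
    for uploader in uploaders:  # note the package spells the keyword 'tabel_name'
        uploader.df_to_mysql(df=df, db_name='生意经2', tabel_name='店铺指标')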
mdbq/aggregation/query_data.py CHANGED
@@ -10,6 +10,9 @@ import platform
  import getpass
  import json
  import os
+ """
+ Downloads data from the database (calling s_query.py to download and clean it), then aggregates and cleans the data; it does not update anything in the database;
+ """


  class MongoDatasQuery:
@@ -125,6 +128,27 @@ class MysqlDatasQuery:
          )
          return df

+     def dplyd(self):
+         start_date, end_date = self.months_data(num=self.months)
+         projection = {
+             '日期': 1,
+             '一级来源': 1,
+             '二级来源': 1,
+             '三级来源': 1,
+             '访客数': 1,
+             '支付金额': 1,
+             '支付买家数': 1,
+             '支付转化率': 1,
+             '加购人数': 1,
+         }
+         df = self.download.data_to_df(
+             db_name='生意参谋数据2',
+             tabel_name='店铺来源_日数据',
+             start_date=start_date,
+             end_date=end_date,
+             projection=projection,
+         )
+         return df

      @staticmethod
      def months_data(num=0, end_date=None):
@@ -137,7 +161,9 @@ class MysqlDatasQuery:


  class GroupBy:
-     """ Aggregate and export data """
+     """
+     Aggregate and export data
+     """
      def __init__(self):
          # self.output: default export directory for the database
          if platform.system() == 'Darwin':
@@ -194,7 +220,7 @@ class GroupBy:
              )
              df.insert(loc=1, column='推广渠道', value='万相台无界版')  # insert a new column into the df
              return df
-         if '宝贝指标' in tabel_name:
+         elif '宝贝指标' in tabel_name:
              df.fillna(0, inplace=True)
              df = df[(df['销售额'] != 0) | (df['退款额'] != 0)]
              df = df.groupby(['日期', '宝贝id', '商家编码', '行业类目'], as_index=False).agg(
@@ -216,6 +242,8 @@ class GroupBy:
                  else '300以下'
              )
              return df
+         elif '店铺来源_日数据' in tabel_name:
+             return df
          else:
              print(f'<{tabel_name}>: Groupby 类尚未配置,数据为空')
              return pd.DataFrame({})
@@ -302,7 +330,7 @@ class GroupBy:
              index=index, header=header, engine=engine, freeze_panes=freeze_panes)


- def main():
+ def data_output():
      sdq = MysqlDatasQuery(target_service='home_lx')
      sdq.months = 0

@@ -311,10 +339,15 @@ def main():
      # df = g.groupby(df=df, tabel_name='推广数据_宝贝主体报表', is_maximize=True)
      # g.as_csv(df=df, filename='推广数据_宝贝主体报表')  # export the data

-     df = sdq.syj()
+     # df = sdq.syj()
+     # g = GroupBy()
+     # df = g.groupby(df=df, tabel_name='宝贝指标', is_maximize=True)
+     # g.as_csv(df=df, filename='宝贝指标')
+
+     df = sdq.dplyd()
      g = GroupBy()
-     df = g.groupby(df=df, tabel_name='宝贝指标', is_maximize=True)
-     g.as_csv(df=df, filename='宝贝指标')
+     df = g.groupby(df=df, tabel_name='店铺来源_日数据', is_maximize=True)
+     g.as_csv(df=df, filename='店铺来源_日数据')


  if __name__ == '__main__':
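
End to end, the new export path added in this file is: dplyd() pulls 店铺来源_日数据 with the projection shown above, the new elif in GroupBy.groupby passes it through unaggregated, and as_csv writes it out. Isolated for clarity (a sketch of what data_output() now runs):

    sdq = MysqlDatasQuery(target_service='home_lx')
    sdq.months = 0   # 0 appears to mean the current month's window
    df = sdq.dplyd()
    g = GroupBy()
    df = g.groupby(df=df, tabel_name='店铺来源_日数据', is_maximize=True)  # passthrough branch
    g.as_csv(df=df, filename='店铺来源_日数据')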
mdbq/company/copysh.py CHANGED
@@ -10,7 +10,7 @@ import shutil
  import time
  import re
  from mdbq.bdup import bdup
- from mdbq.aggregation import database
+ from mdbq.aggregation import aggregation
  from mdbq.config import update_conf
  warnings.filterwarnings('ignore')

mdbq/config/get_myconf.py CHANGED
@@ -106,6 +106,10 @@ def select_config_values(target_service, database, path=None):
          elif database == 'mysql':
              options = ['username_mysql_company_nw', 'password_mysql_company_nw', 'host_mysql_company_nw', 'port_mysql_company_nw', ]

+     elif target_service == 'nas':  # 4. Synology NAS
+         if database == 'mysql':
+             options = ['username_mysql_nas_nw', 'password_mysql_nas_nw', 'host_mysql_nas_nw', 'port_mysql_nas_nw', ]
+
      value = m.get_myconf(options=options)
      if not value:
          return '', '', '', 0
mdbq/dataframe/converter.py CHANGED
@@ -44,14 +44,18 @@ class DataFrameConverter(object):
                  pass
              new_col = col.lower()
              new_col = re.sub(r'[\',,()()/=<>+\-*^"’\[\]~#|&% .;]', '_', new_col)
+             new_col = re.sub(r'_+$', '', new_col)
              df.rename(columns={col: new_col}, inplace=True)
          df.fillna(0, inplace=True)
          return df


  if __name__ == '__main__':
-     df = pd.DataFrame(np.random.randn(5, 3), columns=['a', 'b', 'c'])
-     converter = DataFrameConverter()
-     df = converter.convert_df_cols(df)
-     print(df['a'].dtype)
-     print(df)
+     # df = pd.DataFrame(np.random.randn(5, 3), columns=['a', 'b', 'c'])
+     # converter = DataFrameConverter()
+     # df = converter.convert_df_cols(df)
+     # print(df['a'].dtype)
+     # print(df)
+     pattern = 'dfa_dfawr__'
+     pattern = re.sub(r'_+$', '', pattern)
+     print(pattern)
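
The new re.sub above exists because the first substitution maps every illegal character to '_', so names that end with such characters come out with trailing underscores. A standalone demo of the two steps (the sample column name is illustrative):

    import re

    col = '访客数(人) '
    new_col = re.sub(r'[\',,()()/=<>+\-*^"’\[\]~#|&% .;]', '_', col.lower())
    print(new_col)  # 访客数_人__ : illegal characters replaced by underscores
    new_col = re.sub(r'_+$', '', new_col)
    print(new_col)  # 访客数_人   : trailing underscores stripped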
mdbq/mongo/mongo.py CHANGED
@@ -195,6 +195,7 @@ class DownMongo:

      def data_to_file(self, file_type, db_name, collection_name):
          """
+         Function used by the GUI
          Save mongodb data locally
          db_name: database name
          collections: collection name
@@ -206,9 +207,9 @@ class DownMongo:
          _collection = self.client[self.db_name][self.collection_name]  # connect to the collection
          now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S ")
          if not self.start_date:
-             print(f'{now}正在下载数据 ({self.host}) {self.db_name}: {self.collection_name}, 数据区间: 近 {self.days} 天\n...')
+             print(f'{now}正在下载 ({self.host}) {self.db_name}: {self.collection_name}, 区间: 近 {self.days} 天\n...')
          else:
-             print(f'{now}正在下载数据 ({self.host}) {self.db_name}: {self.collection_name}, 数据区间: {self.start_date} ~ {self.end_date}')
+             print(f'{now}正在下载 ({self.host}) {self.db_name}: {self.collection_name}, 区间: {self.start_date}~{self.end_date}')

          if not self.start_date:
              self.start_date = datetime.datetime.now() - datetime.timedelta(days=self.days)
@@ -221,11 +222,10 @@ class DownMongo:
              {'$project': {'_id': 0}},  # drop the id field
          ]
          results = _collection.aggregate(pipeline)
-         # print(results)
+
          # collect the results
          datas = []
          for doc in results:
-             # print(doc)
              datas.append(doc)
          _df = pd.DataFrame(datas)
          if len(_df) == 0:
@@ -251,7 +251,7 @@ class DownMongo:
              _df.to_excel(_path, index=False, header=True, engine='openpyxl', freeze_panes=(1, 0))  # freeze_panes: freeze the header row
          else:
              print(f'{file_type}: 未支持的文件类型')
-         print(f'{self.collection_name} 保存路径: {_path}, 数据完成!\n-->> 注意数据库导出的数据没有排重!')
+         print(f'<{self.collection_name}> 导出: {_path}, 数据完成!')
          self.client.close()


@@ -655,6 +655,28 @@ class OptimizeDatas:
              start_date += datetime.timedelta(days=1)
          return date_list

+     def rename_column(self):
+         """ Batch-rename columns (field names) in the database """
+         """
+         # for db_name in ['京东数据2', '天猫数据2', '市场数据2', '生意参谋数据2', '生意经2', '属性设置2',]:
+         #     s = OptimizeDatas(username=username, password=password, host=host, port=port)
+         #     s.db_name = db_name
+         #     s.rename_column()
+         #     s.client.close()
+         """
+         self.client = pymongo.MongoClient(self.link)  # connect to the database
+         database_names = self.client.list_database_names()  # all database names
+         collections = self.my_collection_names(db_name=self.db_name)  # all collection names
+         for collection_name in collections:
+             collection = self.client[self.db_name].get_collection(collection_name)
+             has_date_field = collection.find_one({})
+             for key, value in has_date_field.items():
+                 if key.endswith('_'):
+                     new_name = re.sub(r'_+$', '', key)
+                     query = {key: {'$exists': True}}
+                     update = {'$rename': {key: new_name}}
+                     collection.update_many(query, update)
+

  def upload_one_dir():
      username, password, host, port = get_myconf.select_config_values(target_service='home_lx', database='mongodb')
@@ -690,3 +712,16 @@ if __name__ == '__main__':
      # main()
      username, password, host, port = get_myconf.select_config_values(target_service='home_lx', database='mongodb')
      print(username, password, host, port)
+
+     # for db_name in [
+     #     '京东数据2',
+     #     '天猫数据2',
+     #     '市场数据2',
+     #     '生意参谋数据2',
+     #     '生意经2',
+     #     '属性设置2',
+     # ]:
+     #     s = OptimizeDatas(username=username, password=password, host=host, port=port)
+     #     s.db_name = db_name
+     #     s.rename_column()
+     #     s.client.close()
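
The new rename_column above is built on MongoDB's $rename update operator. A standalone sketch of that primitive, assuming a reachable local instance (connection details are illustrative):

    import re
    import pymongo

    client = pymongo.MongoClient('mongodb://localhost:27017')
    collection = client['生意经2']['店铺指标']
    sample = collection.find_one({})  # inspect one document's field names
    if sample:
        for key in sample:
            if key.endswith('_'):
                new_name = re.sub(r'_+$', '', key)
                # rename the field on every document that has it
                collection.update_many({key: {'$exists': True}}, {'$rename': {key: new_name}})
    client.close()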
mdbq/mysql/mysql.py CHANGED
@@ -81,8 +81,11 @@ class MysqlUpload:
              if '8.138.27' in str(self.host) or platform.system() == "Linux":  # low-version Aliyun mysql does not support 0900
                  cursor.execute(f"CREATE DATABASE {db_name} COLLATE utf8mb4_unicode_ci")
                  self.config.update({'charset': 'utf8mb4_unicode_ci'})
+             if '192.168.1.100' in str(self.host):
+                 cursor.execute(f"CREATE DATABASE {db_name}")
              else:
                  cursor.execute(f"CREATE DATABASE {db_name} COLLATE utf8mb4_0900_ai_ci")
+                 # cursor.execute(f"CREATE DATABASE {db_name}")
          connection.commit()
          print(f"创建Database: {db_name}")
      except Exception as e:
@@ -140,23 +143,36 @@ class MysqlUpload:
          # connection.commit()

          # 4. insert/update the data
-         try:
-             now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S ")
-             print(f'{now}正在更新 mysql ({self.host}:{self.port}) {db_name}/{tabel_name}')
-             engine = create_engine(
-                 f'mysql+pymysql://{self.username}:{self.password}@{self.host}:{self.port}/{db_name}'
-             )
-             df.to_sql(tabel_name, con=engine, if_exists='append', index=False)
-         except Exception as e:  # roll back on exception
+         now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S ")
+         print(f'{now}正在更新 mysql ({self.host}:{self.port}) {db_name}/{tabel_name}')
+         if str(self.host) == '192.168.1.100':  # Synology NAS server
              try:
-                 connection.rollback()
-                 print(f'{e}, 发生异常,正在重试...')
-                 # df = df.replace([np.inf, -np.inf], 0)
-                 df.to_sql(tabel_name, con=engine, if_exists='append', index=False)
+                 datas = df.to_dict('records')
+                 for data in datas:
+                     cols = ', '.join(data.keys())
+                     values = ', '.join([f'"{v}"' for v in data.values()])
+                     sql = f"INSERT INTO {tabel_name} ({cols}) VALUES ({values})"
+                     cursor.execute(sql)
+                 connection.commit()
              except Exception as e:
-                 now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S ")
-                 print(f'{now}{db_name}/{tabel_name}数据异常, 正在回滚: {e}')
+                 print(e)
                  connection.rollback()
+         else:  # other servers
+             try:
+                 engine = create_engine(
+                     f'mysql+pymysql://{self.username}:{self.password}@{self.host}:{self.port}/{db_name}'
+                 )
+                 df.to_sql(tabel_name, con=engine, if_exists='append', index=False)
+             except Exception as e:  # roll back on exception
+                 try:
+                     connection.rollback()
+                     print(f'{e}, 发生异常,正在重试...')
+                     # df = df.replace([np.inf, -np.inf], 0)
+                     df.to_sql(tabel_name, con=engine, if_exists='append', index=False)
+                 except Exception as e:
+                     now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S ")
+                     print(f'{now}{db_name}/{tabel_name}数据异常, 正在回滚: {e}')
+                     connection.rollback()
          finally:
              connection.close()

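
Worth noting: the Synology branch above builds each INSERT by interpolating values as f'"{v}"', which breaks on values that themselves contain double quotes. A hedged sketch of the same per-row insert using pymysql placeholders instead (a variant for comparison, not the package's code):

    import pymysql

    def insert_rows(connection, tabel_name, records):
        """Per-row INSERT with driver-side escaping of values."""
        with connection.cursor() as cursor:
            for data in records:
                cols = ', '.join(f'`{k}`' for k in data)       # backtick-quote column names
                placeholders = ', '.join(['%s'] * len(data))   # one %s per value
                sql = f"INSERT INTO `{tabel_name}` ({cols}) VALUES ({placeholders})"
                cursor.execute(sql, tuple(data.values()))
        connection.commit()

    # usage: insert_rows(connection, tabel_name, df.to_dict('records'))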
@@ -545,6 +561,33 @@ class OptimizeDatas:
              start_date += datetime.timedelta(days=1)
          return date_list

+     def rename_column(self):
+         """ Batch-rename columns (field names) in the database """
+         """
+         # for db_name in ['京东数据2', '天猫数据2', '市场数据2', '生意参谋数据2', '生意经2', '属性设置2',]:
+         #     s = OptimizeDatas(username=username, password=password, host=host, port=port)
+         #     s.db_name = db_name
+         #     s.rename_column()
+         """
+         tables = self.table_list(db_name=self.db_name)
+         for table_dict in tables:
+             for key, table_name in table_dict.items():
+                 self.config.update({'database': self.db_name})  # add/update the database key in config
+                 self.connection = pymysql.connect(**self.config)
+                 with self.connection.cursor() as cursor:
+                     cursor.execute(f"SHOW FULL COLUMNS FROM {table_name}")  # query the table's column info
+                     columns = cursor.fetchall()
+                     columns = [{column['Field']: column['Type']} for column in columns]
+                     for column in columns:
+                         for key, value in column.items():
+                             if key.endswith('_'):
+                                 new_name = re.sub(r'_+$', '', key)
+                                 sql = f"ALTER TABLE {table_name} CHANGE COLUMN {key} {new_name} {value}"
+                                 cursor.execute(sql)
+                 self.connection.commit()
+         if self.connection:
+             self.connection.close()
+

  def year_month_day(start_date, end_date):
      """
@@ -594,3 +637,6 @@ def download_datas(tabel_name, save_path, start_date):
  if __name__ == '__main__':
      username, password, host, port = get_myconf.select_config_values(target_service='home_lx', database='mysql')
      print(username, password, host, port)
+
+
+
mdbq/mysql/s_query.py CHANGED
@@ -14,6 +14,9 @@ import calendar
  from mdbq.config import get_myconf

  warnings.filterwarnings('ignore')
+ """
+ This module is dedicated to downloading data from the database and returning a df; it performs no data cleaning;
+ """


  class QueryDatas:
mdbq-0.1.2.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: mdbq
- Version: 0.1.0
+ Version: 0.1.2
  Home-page: https://pypi.org/project/mdbsql
  Author: xigua,
  Author-email: 2587125111@qq.com
mdbq-0.1.2.dist-info/RECORD CHANGED
@@ -1,26 +1,26 @@
  mdbq/__init__.py,sha256=Il5Q9ATdX8yXqVxtP_nYqUhExzxPC_qk_WXQ_4h0exg,16
  mdbq/__version__.py,sha256=y9Mp_8x0BCZSHsdLT_q5tX9wZwd5QgqrSIENLrb6vXA,62
  mdbq/aggregation/__init__.py,sha256=EeDqX2Aml6SPx8363J-v1lz0EcZtgwIBYyCJV6CcEDU,40
- mdbq/aggregation/aggregation.py,sha256=EbV1f3ExlBcbtAWE1R5qcNx5AE9NCZL3ENMqVCy5zHg,55618
- mdbq/aggregation/query_data.py,sha256=9E4dbeQAq7r1srlszP27X3jFiKtMmFc6lP965GHjQms,13368
+ mdbq/aggregation/aggregation.py,sha256=n2MoKwN1ltJD3juu59zkhc7PGUMDTkn0zCcZGs8RnXI,56775
+ mdbq/aggregation/query_data.py,sha256=RaZ0NJwTeNhSgpyj9eUjEVc6Kp83etr6luyzSf9U5WE,14465
  mdbq/bdup/__init__.py,sha256=AkhsGk81SkG1c8FqDH5tRq-8MZmFobVbN60DTyukYTY,28
  mdbq/bdup/bdup.py,sha256=LAV0TgnQpc-LB-YuJthxb0U42_VkPidzQzAagan46lU,4234
  mdbq/clean/__init__.py,sha256=A1d6x3L27j4NtLgiFV5TANwEkLuaDfPHDQNrPBbNWtU,41
  mdbq/clean/data_clean.py,sha256=33OmeQFl9AW21P5EOay52W_S8DF96H5oHwCg4fSuBxA,85359
  mdbq/company/__init__.py,sha256=qz8F_GsP_pMB5PblgJAUAMjasuZbOEp3qQOCB39E8f0,21
- mdbq/company/copysh.py,sha256=0exynzeqf85gCBQXAgKycVxddMhr0TjkFcBP_NK0QTA,15609
+ mdbq/company/copysh.py,sha256=l6D73fpVjnQIpf2qhMPIagqk-RIqUdc-4RZlbx-EO-s,15612
  mdbq/config/__init__.py,sha256=jso1oHcy6cJEfa7udS_9uO5X6kZLoPBF8l3wCYmr5dM,18
- mdbq/config/get_myconf.py,sha256=q6Pylsnh4-MsHP9JcX8IdlnGVWikz9hio1HI_qh6Wvs,6171
+ mdbq/config/get_myconf.py,sha256=9v3xebfcS1tptxpvk3_tGxfXjAehGVCveYe4iRUzLQQ,6372
  mdbq/config/update_conf.py,sha256=YjGjjRchu5BcrmLJkoLjHEF2TbGOmsgCWX4LroXOYWQ,3455
  mdbq/dataframe/__init__.py,sha256=2HtCN8AdRj53teXDqzysC1h8aPL-mMFy561ESmhehGQ,22
- mdbq/dataframe/converter.py,sha256=VLG6Y0Ca_OkgpVkHDQU6AHSxGqISx6GsGiyDGHkXq0g,2668
+ mdbq/dataframe/converter.py,sha256=5hrGx-lPVwYLuyZNOHf6K7O9_AAKZ7mCp0MHxlxSCnk,2816
  mdbq/log/__init__.py,sha256=Mpbrav0s0ifLL7lVDAuePEi1hJKiSHhxcv1byBKDl5E,15
  mdbq/log/mylogger.py,sha256=oaT7Bp-Hb9jZt52seP3ISUuxVcI19s4UiqTeouScBO0,3258
  mdbq/mongo/__init__.py,sha256=SILt7xMtQIQl_m-ik9WLtJSXIVf424iYgCfE_tnQFbw,13
- mdbq/mongo/mongo.py,sha256=AK028yQWPajf_A-PYY-NJTfWiGQJDpBwPY3aS6cOcHk,30431
+ mdbq/mongo/mongo.py,sha256=q0B4wXDSTtXg_vMN7MPh6zdxl6tT68tM74LmdVNQQek,31892
  mdbq/mysql/__init__.py,sha256=A_DPJyAoEvTSFojiI2e94zP0FKtCkkwKP1kYUCSyQzo,11
- mdbq/mysql/mysql.py,sha256=HX3keNO-Hbpra3XNJtWbBwsD7pLHiKfM4ZQaSBksAkk,28149
- mdbq/mysql/s_query.py,sha256=bRnW8Cpy4fSsbMhzGCvjiK2kin9uamVumJC3nLAyjMg,5213
+ mdbq/mysql/mysql.py,sha256=Ul-QJHFKjSHu2uxk_CRswVvCV-oFtN-t-3wIOoKfLLg,30550
+ mdbq/mysql/s_query.py,sha256=pzxRGBRP3Ku4oVLqfpMd1VWWct3YlxxnVavtKa9kgSM,5302
  mdbq/mysql/year_month_day.py,sha256=VgewoE2pJxK7ErjfviL_SMTN77ki8GVbTUcao3vFUCE,1523
  mdbq/other/__init__.py,sha256=jso1oHcy6cJEfa7udS_9uO5X6kZLoPBF8l3wCYmr5dM,18
  mdbq/other/porxy.py,sha256=UHfgEyXugogvXgsG68a7QouUCKaohTKKkI4RN-kYSdQ,4961
@@ -30,7 +30,7 @@ mdbq/pbix/__init__.py,sha256=Trtfaynu9RjoTyLLYBN2xdRxTvm_zhCniUkVTAYwcjo,24
  mdbq/pbix/pbix_refresh.py,sha256=JUjKW3bNEyoMVfVfo77UhguvS5AWkixvVhDbw4_MHco,2396
  mdbq/pbix/refresh_all.py,sha256=wulHs4rivf4Mi0Pii2QR5Nk9-TBcvSwnCB_WH9QULKE,5939
  mdbq/spider/__init__.py,sha256=RBMFXGy_jd1HXZhngB2T2XTvJqki8P_Fr-pBcwijnew,18
- mdbq-0.1.0.dist-info/METADATA,sha256=627YqHp57NZ-o4gylzObjtKU3DldI5Dl4rNpldzb8JU,245
- mdbq-0.1.0.dist-info/WHEEL,sha256=cpQTJ5IWu9CdaPViMhC9YzF8gZuS5-vlfoFihTBC86A,91
- mdbq-0.1.0.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
- mdbq-0.1.0.dist-info/RECORD,,
+ mdbq-0.1.2.dist-info/METADATA,sha256=w8Ljl-Bift6EdQ_GXXbZka_9ai6N4UWEa1X-rwrJDo4,245
+ mdbq-0.1.2.dist-info/WHEEL,sha256=cpQTJ5IWu9CdaPViMhC9YzF8gZuS5-vlfoFihTBC86A,91
+ mdbq-0.1.2.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
+ mdbq-0.1.2.dist-info/RECORD,,