mdbq 1.3.1__py3-none-any.whl → 1.3.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
mdbq/aggregation/aggregation.py CHANGED
@@ -599,11 +599,11 @@ class DatabaseUpdate:
              }
          )
  
-     def upload_df(self, service_databases=[{}]):
+     def upload_df(self, service_databases=[{}], path=None, system_name=None):
          """
          Upload the cleaned df to the database
          """
-         df_to_json = df_types.DataTypes()  # json file holding the dtypes info of the data
+         df_to_json = df_types.DataTypes(path=path, system_name=system_name)  # json file holding the dtypes info of the data
          for service_database in service_databases:
              for service_name, database in service_database.items():
                  # print(service_name, database)
@@ -803,7 +803,7 @@ class DatabaseUpdate:
          return df
  
  
- def upload_dir(path, db_name, collection_name, dbs={'mysql': True, 'mongodb': True}):
+ def upload_dir(path, db_name, collection_name, dbs={'mysql': True, 'mongodb': True}, json_path=None, system_name=None):
      """ Upload a folder to the mysql or mongodb database """
      if not os.path.isdir(path):
          print(f'{os.path.splitext(os.path.basename(__file__))[0]}.upload_dir: 函数只接受文件夹路径,不是一个文件夹: {path}')
@@ -845,7 +845,7 @@ def upload_dir(path, db_name, collection_name, dbs={'mysql': True, 'mongodb': Tr
              )
  
              # Read the df's data type info from the local json file
-             df_to_json = df_types.DataTypes()
+             df_to_json = df_types.DataTypes(path=json_path, system_name=system_name)
              dtypes = df_to_json.load_dtypes(
                  db_name=db_name,
                  collection_name=collection_name,
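
For orientation, a hedged usage sketch of the widened upload_dir signature; the folder path is hypothetical, and json_path/system_name are the 1.3.3 additions (left at their defaults they reproduce the old behavior):

    # a sketch, not from the package: upload a folder of cleaned files to mysql only
    upload_dir(
        path='/path/to/cleaned_csvs',  # hypothetical folder
        db_name='test',
        collection_name='增量更新测试',
        dbs={'mysql': True, 'mongodb': False},
        json_path=None,          # None defers to the package default
        system_name='company',   # selects the per-system dtypes json
    )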
@@ -976,13 +976,13 @@ def test2():
          # {'home_lx': 'mongodb'},
          {'home_lx': 'mysql'},
          # {'nas': 'mysql'}
-     ])
+     ], path=None, system_name=None)
  
  
  if __name__ == '__main__':
      # username, password, host, port = get_myconf.select_config_values(target_service='nas', database='mysql')
      # print(username, password, host, port)
-     # file_dir(one_file=False)
+     file_dir(one_file=False)
      # one_file_to_mysql(
      #     file='/Users/xigua/数据中心/原始文件2/推广报表/品销宝/账户/账户_明星店铺报表_2023-11-13_2023-12-12.csv',
      #     db_name='推广数据2',
@@ -1001,11 +1001,12 @@ if __name__ == '__main__':
  
      # test2()
  
-     file = '/Users/xigua/Downloads/余额查询.csv'
-     df = pd.read_csv(file, encoding='utf-8_sig', header=0, na_filter=False)
-     username, password, host, port = get_myconf.select_config_values(target_service='company', database='mysql')
-     m = mysql.MysqlUpload(username=username, password=password, host=host, port=port)
-     m.df_to_mysql(df=df, db_name='test', table_name='增量更新测试',
-                   drop_dup=False,
-                   icm_update=['日期', '推广费余额']
-                   )
+     # file = ''
+     # df = pd.read_csv(file, encoding='utf-8_sig', header=0, na_filter=False)
+     # username, password, host, port = get_myconf.select_config_values(target_service='company', database='mysql')
+     # m = mysql.MysqlUpload(username=username, password=password, host=host, port=port)
+     # m.df_to_mysql(df=df, db_name='test', table_name='增量更新测试',
+     #               drop_dup=False,
+     #               # icm_update=['日期', '推广费余额'],
+     #               system_name='company',
+     #               )
mdbq/aggregation/df_types.py CHANGED
@@ -48,7 +48,11 @@ class DataTypes:
              }
          }
          self.path = set_support.SetSupport(dirname='support').dirname
-         self.json_file = os.path.join(self.path, 'df_types.json')
+         self.system_name = None
+         if self.system_name:
+             self.json_file = os.path.join(self.path, f'{self.system_name}_df_types.json')
+         else:
+             self.json_file = os.path.join(self.path, 'df_types.json')
          if not os.path.isdir(self.path):
              os.makedirs(self.path)
          if not os.path.isfile(self.json_file):
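
Note that in the lines shown, self.system_name is assigned None immediately before it is tested, so the f-string branch cannot fire and the file name stays df_types.json. A standalone sketch of the selection logic as released (the function name is ours, not the package's):

    import os

    def df_types_file(path):
        system_name = None  # hard-coded in the released hunk
        if system_name:  # never true here, so this branch is unreachable
            return os.path.join(path, f'{system_name}_df_types.json')
        return os.path.join(path, 'df_types.json')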
mdbq/aggregation/mysql_types.py CHANGED
@@ -38,7 +38,7 @@ class DataTypes:
      Data overview: records the column info (dtypes) of a dataframe or a database; the info can be stored, or loaded for use when writing to the database.
      The first field is the category (e.g. dataframe/mysql), the second the database name, the third the collection name, and the fourth the column names and their data types
      """
-     def __init__(self):
+     def __init__(self, path=None, system_name=None):
          self.datas = {
              '_json统计':
              {
@@ -46,11 +46,16 @@ class DataTypes:
                  '数据库量': 0,
                  '集合数量': 0,
                  '字段量': 0,
-                 '数据简介': '记录 dataframe 或者数据库的列信息(dtypes)',
+                 '数据简介': '记录数据库各表的数据类型信息',
              }
          }
-         self.path = set_support.SetSupport(dirname='support').dirname
-         self.json_file = os.path.join(self.path, 'mysql_types.json')
+         self.path = path
+         if not self.path:
+             self.path = set_support.SetSupport(dirname='support').dirname
+         self.system_name = system_name
+         if not self.system_name:
+             self.system_name = 'home_lx'
+         self.json_file = os.path.join(self.path, f'mysql_types_{self.system_name}.json')
          if not os.path.isdir(self.path):
              os.makedirs(self.path)
          if not os.path.isfile(self.json_file):
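
The net effect of the new constructor arguments: the dtypes json can land in a caller-chosen directory and is namespaced per system, with 'home_lx' as the released default. A standalone sketch of the file-name resolution (the helper name and default_dir are ours, not the package's):

    import os

    def mysql_types_json_path(path=None, system_name=None,
                              default_dir='/opt/mdbq/support'):  # stand-in for set_support's dirname
        path = path or default_dir
        system_name = system_name or 'home_lx'  # the released default
        return os.path.join(path, f'mysql_types_{system_name}.json')

    # mysql_types_json_path('/Users/xigua/Downloads', 'company')
    # -> '/Users/xigua/Downloads/mysql_types_company.json'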
@@ -149,12 +154,10 @@ class DataTypes:
          return {}, cl, None, None  # these are returned so the json is written only after the columns are added; only then can the types info be read
  
  
- def mysql_all_dtypes(db_name=None, table_name=None, path=None):
+ def mysql_all_dtypes(db_name=None, table_name=None, path=None, system_name=None):
      """
      Update the dtypes info of all databases in the laptop's mysql to a local json
      """
-     if not path:
-         path = set_support.SetSupport(dirname='support').dirname
  
      username, password, host, port = get_myconf.select_config_values(target_service='home_lx', database='mysql')
      config = {
@@ -205,8 +208,7 @@ def mysql_all_dtypes(db_name=None, table_name=None, path=None):
      connection.close()
      time.sleep(0.5)
  
-     d = DataTypes()
-     d.json_file = os.path.join(path, f'mysql_types.json')  # json save location
+     d = DataTypes(path=path, system_name=system_name)
      for result in results:
          for db_n, table_n in result.items():
              # print(db_n, table_n, db_name, table_name)
@@ -218,7 +220,6 @@ def mysql_all_dtypes(db_name=None, table_name=None, path=None):
                  continue
              # If neither db_name nor table_name is specified, download all tables of all databases
              print(f'获取列信息 数据库: < {db_n} >, 数据表: < {table_n} >')
-             # d.mysql_dtypes_to_json(db_name=db_n, table_name=table_n, path=path)
              sq = s_query.QueryDatas(username=username, password=password, host=host, port=port)
              # Fetch the specified columns of the table; returns a list
              # [{'视频bv号': 'BV1Dm4y1S7BU', '下载进度': 1}, {'视频bv号': 'BV1ov411c7US', '下载进度': 1}]
@@ -242,5 +243,5 @@ def mysql_all_dtypes(db_name=None, table_name=None, path=None):
  if __name__ == '__main__':
      # Update the dtypes info of all mysql databases to the local json
      mysql_all_dtypes(
-         path='/Users/xigua/Downloads',
+         path='/Users/xigua/Downloads', system_name='company',
      )
mdbq/aggregation/query_data.py CHANGED
@@ -495,7 +495,7 @@ class GroupBy:
          df.to_excel(os.path.join(path, filename + '.xlsx'), index=index, header=header, engine=engine, freeze_panes=freeze_panes)
  
  
- def data_aggregation(service_databases=[{}], months=1):
+ def data_aggregation(service_databases=[{}], months=1, system_name=None,):
      """
      1. Read data from the database
      2. Aggregate and clean the data
@@ -558,7 +558,8 @@ def data_aggregation(service_databases=[{}], months=1):
          db_name=db_name,
          table_name=table_name,
          drop_dup=False,
-         icm_update=unique_key_list
+         icm_update=unique_key_list,
+         system_name=system_name,
      )  # 3. Write back to the database
      res = g.performance(bb_tg=True)  # profit-and-loss table; depends on other tables, so it is done separately
      m.df_to_mysql(
@@ -566,7 +567,8 @@ def data_aggregation(service_databases=[{}], months=1):
          db_name='聚合数据',
          table_name='_全店商品销售',
          drop_dup=False,
-         icm_update=['日期', '商品id']  # set the unique primary key
+         icm_update=['日期', '商品id'],  # set the unique primary key
+         system_name = system_name,
      )
      res = g.performance(bb_tg=False)  # profit-and-loss table; depends on other tables, so it is done separately
      m.df_to_mysql(
  m.df_to_mysql(
@@ -574,12 +576,13 @@ def data_aggregation(service_databases=[{}], months=1):
574
576
  db_name='聚合数据',
575
577
  table_name='_推广商品销售',
576
578
  drop_dup=False,
577
- icm_update=['日期', '商品id'] # 设置唯一主键
579
+ icm_update=['日期', '商品id'], # 设置唯一主键
580
+ system_name=system_name,
578
581
  )
579
582
 
580
583
  # optimize_data.op_data(service_databases=service_databases, days=3650) # 立即启动对聚合数据的清理工作
581
584
 
582
585
 
583
586
  if __name__ == '__main__':
584
- data_aggregation(service_databases=[{'company': 'mysql'}], months=1)
587
+ data_aggregation(service_databases=[{'company': 'mysql'}], months=1, system_name='company')
585
588
  # optimize_data.op_data(service_databases=[{'company': 'mysql'}], days=3650) # 立即启动对聚合数据的清理工作
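
Taken together, the query_data.py hunks thread system_name from the entry point down to MysqlUpload.df_to_mysql, which forwards it to the dtypes-json machinery in mysql_types.py. A compressed sketch of the resulting call chain (names as in the diff, control flow simplified):

    # sketch of the 1.3.3 flow, simplified
    data_aggregation(service_databases=[{'company': 'mysql'}], months=1, system_name='company')
    #  -> m.df_to_mysql(..., system_name='company')
    #       -> self.convert_dtypes(..., path=json_path, system_name='company')
    #            -> mysql_types.DataTypes(path=None, system_name='company')
    #                 -> reads/writes mysql_types_company.json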
mdbq/mysql/mysql.py CHANGED
@@ -57,7 +57,7 @@ class MysqlUpload:
          }
          self.filename = None
  
-     def df_to_mysql(self, df, table_name, db_name='远程数据源', icm_update=[], icm_up=[], df_sql=False, drop_dup=True, drop_duplicates=False, filename=None, count=None):
+     def df_to_mysql(self, df, table_name, db_name='远程数据源', icm_update=[], icm_up=[], df_sql=False, drop_dup=True, drop_duplicates=False, filename=None, count=None, json_path=None, system_name=None):
          """
          Write the df to the database
          db_name: database name
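
A hedged usage sketch of the widened df_to_mysql signature, adapted from the commented-out example in aggregation.py's __main__ block; the input file and credentials are placeholders:

    import pandas as pd
    from mdbq.mysql import mysql  # module path per the package RECORD

    df = pd.read_csv('cleaned.csv', encoding='utf-8_sig', header=0, na_filter=False)  # hypothetical input
    m = mysql.MysqlUpload(username='user', password='pass', host='127.0.0.1', port=3306)  # placeholders
    m.df_to_mysql(
        df=df,
        db_name='test',
        table_name='增量更新测试',
        drop_dup=False,
        json_path=None,         # new in 1.3.3: directory for the dtypes json (None keeps the default)
        system_name='company',  # new in 1.3.3: namespaces the dtypes json per system
    )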
@@ -115,44 +115,42 @@ class MysqlUpload:
          connection = pymysql.connect(**self.config)  # reconnect to the database
          with connection.cursor() as cursor:
              # 1. Look up the table; create an empty one if it doesn't exist
-             sql = f"SHOW TABLES LIKE '{table_name}';"  # special characters don't need escaping
-             cursor.execute(sql)
+             sql = "SHOW TABLES LIKE %s;"  # special characters don't need escaping
+             cursor.execute(sql, (table_name))
              if not cursor.fetchone():
-                 sql = f"CREATE TABLE IF NOT EXISTS `{table_name}` (id INT AUTO_INCREMENT PRIMARY KEY)"
+                 sql = f"CREATE TABLE IF NOT EXISTS `{table_name}` (id INT AUTO_INCREMENT PRIMARY KEY);"
                  cursor.execute(sql)
                  print(f'创建 mysql 表: {table_name}')
  
              # 2. Column type conversion: map the df's dtypes to mysql data types
-             dtypes, cl, db_n, tb_n = self.convert_dtypes(df=df, db_name=db_name, table_name=table_name)
+             dtypes, cl, db_n, tb_n = self.convert_dtypes(df=df, db_name=db_name, table_name=table_name, path=json_path, system_name=system_name)
  
              # special characters don't need escaping
-             sql = f"SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = '{db_name}' AND TABLE_NAME = '{table_name}';"
-             cursor.execute(sql)
+             sql = "SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = %s AND TABLE_NAME = %s;"
+             cursor.execute(sql, (db_name, table_name))
              col_exist = [item['COLUMN_NAME'] for item in cursor.fetchall()]
              cols = df.columns.tolist()
              col_not_exist = [col for col in cols if col not in col_exist]
              # Check the columns; create any that don't exist
              if col_not_exist:  # columns missing from the table
                  for col in col_not_exist:
-                     try:
-                         # create the column; needs escaping
-                         sql = f"ALTER TABLE `{table_name}` ADD COLUMN `{col}` {dtypes[col]};"
-                         cursor.execute(sql)
-                         print(f"添加列: {col}({dtypes[col]})")  # add the column with its specified data type
-
-                         # create an index
-                         if col == '日期':
-                             cursor.execute(f"SHOW INDEXES FROM `{table_name}` WHERE `Column_name` = '{col}'")
-                             result = cursor.fetchone()  # check whether the index exists
-                             if not result:
-                                 cursor.execute(f"CREATE INDEX index_name ON `{table_name}`(`{col}`)")
-                     except Exception as e:
-                         print(f'{self.filename}: {e}')
+                     # create the column; needs escaping
+                     sql = f"ALTER TABLE `{table_name}` ADD COLUMN `{col}` {dtypes[col]};"
+                     cursor.execute(sql)
+                     print(f"添加列: {col}({dtypes[col]})")  # add the column with its specified data type
+
+                     # create an index
+                     if col == '日期':
+                         sql = f"SHOW INDEXES FROM `{table_name}` WHERE `Column_name` = %s"
+                         cursor.execute(sql, (col))
+                         result = cursor.fetchone()  # check whether the index exists
+                         if not result:
+                             cursor.execute(f"CREATE INDEX index_name ON `{table_name}`(`{col}`)")
              connection.commit()  # commit the transaction
  
          if df_sql:
              now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S ")
-             print(f'{now}正在更新 mysql ({self.host}:{self.port}) {db_name}/{table_name}, {count}, {self.filename}')
+             print(f'{now}正在更新: mysql ({self.host}:{self.port}) {db_name}/{table_name}, {count}, {self.filename}')
              engine = create_engine(
                  f"mysql+pymysql://{self.username}:{self.password}@{self.host}:{self.port}/{db_name}")  # create the database engine
              df.to_sql(
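
The recurring change in this hunk replaces f-string interpolation of values with pymysql parameter binding. A minimal standalone sketch of the pattern (credentials and table name are placeholders, not from the package); note that pymysql accepts a bare scalar for a single %s, which is why the diff's (table_name) works, though the one-element tuple (table_name,) is the more conventional spelling:

    import pymysql

    conn = pymysql.connect(host='127.0.0.1', port=3306, user='user', password='pass',
                           database='test', cursorclass=pymysql.cursors.DictCursor)
    with conn.cursor() as cursor:
        # values are escaped and bound by the driver, not pasted into the SQL string
        cursor.execute("SHOW TABLES LIKE %s;", ('增量更新测试',))
        print(cursor.fetchone())
        cursor.execute(
            "SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS "
            "WHERE TABLE_SCHEMA = %s AND TABLE_NAME = %s;",
            ('test', '增量更新测试'),
        )
        print([row['COLUMN_NAME'] for row in cursor.fetchall()])
    conn.close()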
@@ -169,11 +167,11 @@ class MysqlUpload:
              # these are returned so the json is written only after the columns are added; only then can the types info be read
              # ⚠️ by default, mysql_all_dtypes reads only home_lx's database info, not other systems'
              if cl and db_n and tb_n:
-                 mysql_types.mysql_all_dtypes(db_name=db_name, table_name=table_name)  # update one table's dtypes
+                 mysql_types.mysql_all_dtypes(db_name=db_name, table_name=table_name, system_name=system_name)  # update one table's dtypes
              elif cl and db_n:
-                 mysql_types.mysql_all_dtypes(db_name=db_name)  # update one database's dtypes
+                 mysql_types.mysql_all_dtypes(db_name=db_name, system_name=system_name)  # update one database's dtypes
              elif cl:
-                 mysql_types.mysql_all_dtypes()  # update the dtypes of all tables in all databases to the local json
+                 mysql_types.mysql_all_dtypes(system_name=system_name)  # update the dtypes of all tables in all databases to the local json
  
              # 4. Remove data in the specified date range; only for aggregated data, do not set this in other cases
              if drop_duplicates and '日期' in df.columns.tolist():
@@ -195,20 +193,21 @@ class MysqlUpload:
              try:
                  cols = ', '.join(f"`{item}`" for item in data.keys())  # column names need escaping
                  # data.update({item: f"{data[item]}" for item in data.keys()})  # convert all values to strings; not required
-                 values = ', '.join([f"'{item}'" for item in data.values()])  # values need single quotes ''
+                 values = ', '.join([f'"{item}"' for item in data.values()])  # values need quotes
                  condition = []
                  for k, v in data.items():
-                     condition += [f"`{k}` = '{v}'"]
+                     condition += [f'`{k}` = "{v}"']
                  condition = ' AND '.join(condition)  # build the query condition
                  # print(condition)
  
                  if drop_dup:  # insert after duplicate check
-                     sql = f"SELECT {cols} FROM `{table_name}` WHERE {condition}"
+                     sql = "SELECT %s FROM %s WHERE %s" % (cols, table_name, condition)
                      # sql = f"SELECT {cols} FROM `{table_name}` WHERE `创建时间` = '2014-09-19 14:32:33'"
+                     # print(sql)
                      cursor.execute(sql)
                      result = cursor.fetchall()  # fetch the query result: a non-empty list means the data already exists (no duplicate insert); an empty tuple means it doesn't
                      if not result:  # insert only if the data doesn't exist
-                         sql = f"INSERT INTO `{table_name}` ({cols}) VALUES ({values});"
+                         sql = f"INSERT INTO `{table_name}` ({cols}) VALUES (%s);" % (values)
                          cursor.execute(sql)
                      # else:
                      #     print(f'重复数据不插入: {condition[:50]}...')
@@ -224,7 +223,7 @@ class MysqlUpload:
                  unique_keys = ', '.join(f"`{item}`" for item in update_col)  # column names need escaping
                  condition = []
                  for up_col in icm_update:
-                     condition += [f"`{up_col}` = '{data[up_col]}'"]
+                     condition += [f'`{up_col}` = "{data[up_col]}"']
                  condition = ' AND '.join(condition)  # example condition value: `品销宝余额` = '2930.73' AND `短信剩余` = '67471'
                  sql = f"SELECT {unique_keys} FROM `{table_name}` WHERE {condition}"
                  # print(sql)
@@ -251,7 +250,7 @@ class MysqlUpload:
                      not_change_col += [item for item in update_col if item != col]
                  # change_values is the incoming df data that differs from the database; example values: [`品销宝余额` = '9999.0', `短信剩余` = '888']
                  if change_values:  # non-empty change_values means the values need updating
-                     not_change_values = [f"`{col}` = '{str(data[col])}'" for col in not_change_col]
+                     not_change_values = [f'`{col}` = "{str(data[col])}"' for col in not_change_col]
                      not_change_values = ' AND '.join(not_change_values)  # example: `短信剩余` = '888' AND `test1` = '93'
                      # print(change_values, not_change_values)
                      condition += f' AND {not_change_values}'  # rebuild the full query condition, adding the unchanged columns to it
@@ -283,7 +282,7 @@ class MysqlUpload:
                  # # cursor.execute(sql)
  
                  else:
-                     sql = f"INSERT INTO `{table_name}` ({cols}) VALUES ({values});"
+                     sql = f"INSERT INTO `{table_name}` ({cols}) VALUES (%s);" % (values)
                      cursor.execute(sql)
              except Exception as e:
                  # print(data)
@@ -293,7 +292,7 @@ class MysqlUpload:
          connection.commit()  # commit the transaction
          connection.close()
  
-     def convert_dtypes(self, df, db_name, table_name):
+     def convert_dtypes(self, df, db_name, table_name, path=None, system_name=None):
          """
          Convert the df's types to mysql-specific data types based on the local json
          The local json file may not exist (the function converts by the given rules and updates the json)
@@ -301,7 +300,7 @@
          """
          cols = df.columns.tolist()
          # path = set_support.SetSupport(dirname='support').dirname
-         d = mysql_types.DataTypes()
+         d = mysql_types.DataTypes(path=path, system_name=system_name)
          # Read the dtype info from the local file
          dtypes, cl, db_n, tb_n = d.load_dtypes(cl='mysql', db_name=db_name, table_name=table_name)
          # May return None because there is no json file
mdbq-1.3.3.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: mdbq
- Version: 1.3.1
+ Version: 1.3.3
  Home-page: https://pypi.org/project/mdbsql
  Author: xigua,
  Author-email: 2587125111@qq.com
mdbq-1.3.3.dist-info/RECORD CHANGED
@@ -1,11 +1,11 @@
  mdbq/__init__.py,sha256=Il5Q9ATdX8yXqVxtP_nYqUhExzxPC_qk_WXQ_4h0exg,16
  mdbq/__version__.py,sha256=y9Mp_8x0BCZSHsdLT_q5tX9wZwd5QgqrSIENLrb6vXA,62
  mdbq/aggregation/__init__.py,sha256=EeDqX2Aml6SPx8363J-v1lz0EcZtgwIBYyCJV6CcEDU,40
- mdbq/aggregation/aggregation.py,sha256=E0LuoybFFUEu8JTArYVVi5tAtuPxZTMm4CHkfAd92JQ,57627
- mdbq/aggregation/df_types.py,sha256=rHLIgv82PJSFmDvXkZyOJAffXkFyyMyFO23w9tUt8EQ,7525
- mdbq/aggregation/mysql_types.py,sha256=_XIqpaX_qmqolFlGywMYfvBn32u8MbPCaX6n7rQOVRQ,10634
+ mdbq/aggregation/aggregation.py,sha256=vDUHXbRlBLGQDrxJ_r0z3A31QyR_7zUvZ-Booa_NPFY,57813
+ mdbq/aggregation/df_types.py,sha256=nWadlcioysdFtelot3W6TWCuVntw6vWMOYYnAbGHnS4,7694
+ mdbq/aggregation/mysql_types.py,sha256=jTQaCrDy9hj36GIImshXANyQNFAvVKJTybkzJSh-qJ8,10671
  mdbq/aggregation/optimize_data.py,sha256=jLAWtxPUuhpo4XTVrhKtT4xK3grs7r73ePQfLhxlu1I,779
- mdbq/aggregation/query_data.py,sha256=eLsxev_fEW_7N-uqTBsINLZbHP6B0wqjxVzIL4-SK3c,27131
+ mdbq/aggregation/query_data.py,sha256=wtXd7lZmlJhch8Dt3sTgsrPVTMeaV37Fd9QvsQS87w8,27308
  mdbq/bdup/__init__.py,sha256=AkhsGk81SkG1c8FqDH5tRq-8MZmFobVbN60DTyukYTY,28
  mdbq/bdup/bdup.py,sha256=LAV0TgnQpc-LB-YuJthxb0U42_VkPidzQzAagan46lU,4234
  mdbq/clean/__init__.py,sha256=A1d6x3L27j4NtLgiFV5TANwEkLuaDfPHDQNrPBbNWtU,41
@@ -24,7 +24,7 @@ mdbq/log/mylogger.py,sha256=oaT7Bp-Hb9jZt52seP3ISUuxVcI19s4UiqTeouScBO0,3258
  mdbq/mongo/__init__.py,sha256=SILt7xMtQIQl_m-ik9WLtJSXIVf424iYgCfE_tnQFbw,13
  mdbq/mongo/mongo.py,sha256=v9qvrp6p1ZRWuPpbSilqveiE0FEcZF7U5xUPI0RN4xs,31880
  mdbq/mysql/__init__.py,sha256=A_DPJyAoEvTSFojiI2e94zP0FKtCkkwKP1kYUCSyQzo,11
- mdbq/mysql/mysql.py,sha256=MWDvOkrkZNJuPKOcW5wox8FWIfFUT20Zra5tZY7chnc,44178
+ mdbq/mysql/mysql.py,sha256=SF29RXZHa6bTK27vPk25qBay89d4qKPvoWwJCRmcxC0,44313
  mdbq/mysql/s_query.py,sha256=a33aYhW6gAnspIZfQ7l23ePln9-MD1f_ukypr5M0jd8,8018
  mdbq/mysql/year_month_day.py,sha256=VgewoE2pJxK7ErjfviL_SMTN77ki8GVbTUcao3vFUCE,1523
  mdbq/other/__init__.py,sha256=jso1oHcy6cJEfa7udS_9uO5X6kZLoPBF8l3wCYmr5dM,18
@@ -35,7 +35,7 @@ mdbq/pbix/__init__.py,sha256=Trtfaynu9RjoTyLLYBN2xdRxTvm_zhCniUkVTAYwcjo,24
  mdbq/pbix/pbix_refresh.py,sha256=JUjKW3bNEyoMVfVfo77UhguvS5AWkixvVhDbw4_MHco,2396
  mdbq/pbix/refresh_all.py,sha256=tgy762608HMaXWynbOURIf2UVMuSPybzrDXQnOOcnZU,6102
  mdbq/spider/__init__.py,sha256=RBMFXGy_jd1HXZhngB2T2XTvJqki8P_Fr-pBcwijnew,18
- mdbq-1.3.1.dist-info/METADATA,sha256=r3c6nhrIMxNU9aCxjLM5coFu3qPglqzbt1EVME5_1Ow,245
- mdbq-1.3.1.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
- mdbq-1.3.1.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
- mdbq-1.3.1.dist-info/RECORD,,
+ mdbq-1.3.3.dist-info/METADATA,sha256=9OO2EMliH5d5IPcbcP0d2ALZ8arwWvtpurYxH0B9X_k,245
+ mdbq-1.3.3.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
+ mdbq-1.3.3.dist-info/top_level.txt,sha256=2FQ-uLnCSB-OwFiWntzmwosW3X2Xqsg0ewh1axsaylA,5
+ mdbq-1.3.3.dist-info/RECORD,,
File without changes