rda-python-metrics 1.0.9__py3-none-any.whl → 1.0.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of rda-python-metrics might be problematic; see the registry's advisory page for this release for more details.

@@ -25,7 +25,7 @@ from . import PgIPInfo
25
25
 
26
26
  USAGE = {
27
27
  'TDSTBL' : "tdsusage",
28
- 'WEBTBL' : "webusage",
28
+ 'WEBTBL' : "wusage",
29
29
  'CDATE' : PgUtil.curdate(),
30
30
  }
31
31
 
@@ -54,6 +54,10 @@ DSIDS = {
54
54
  'ucar.cgd.ccsm4.pliomip2' : ['d651037']
55
55
  }
56
56
 
57
+ ALLIDS = list(DSIDS.keys())
58
+
59
+ WFILES = {}
60
+ WUSERS = {}
57
61
 
58
62
  #
59
63
  # main function to run this program
@@ -86,13 +90,9 @@ def main():
86
90
 
87
91
 
88
92
  PgLOG.cmdlog("fillcdgusage {}".format(' '.join(argv)))
93
+ dranges = get_date_ranges(params)
89
94
  dsids = get_dataset_ids(params['s'])
90
- if dsids:
91
- del params['s']
92
- for o in params:
93
- dranges = get_date_ranges(o, params[o])
94
- fill_cdg_usages(dsids, dranges)
95
-
95
+ if dranges and dsids: fill_cdg_usages(dsids, dranges)
96
96
  PgLOG.pglog(None, PgLOG.LOGWRN|PgLOG.SNDEML) # send email out if any
97
97
 
98
98
  sys.exit(0)
@@ -112,68 +112,78 @@ def get_dataset_ids(dsnames):
112
112
  dsids = []
113
113
  tbname = 'metadata.dataset'
114
114
  for dsname in dsnames:
115
+ if re.match(r'^all$', dsname, re.I): return get_dataset_ids(ALLIDS)
115
116
  if dsname not in DSIDS:
116
117
  PgLOG.pglog(dsname + ": Unknown CDG dataset short name", PgLOG.LOGWRN)
117
118
  continue
118
- rdaid = DSIDS[dsname]
119
119
  pgrec = PgDBI.pgget(tbname, 'id', "short_name = '{}'".format(dsname))
120
120
  if not (pgrec and pgrec['id']): continue
121
- dsid = pgrec['id']
122
- if dsid in dsids: continue
123
- dsids.append([dsid, rdaid])
124
- recursive_dataset_ids(dsid, rdaid, dsids)
121
+ rdaids = DSIDS[dsname]
122
+ cdgid = pgrec['id']
123
+ cdgids = [cdgid]
124
+ recursive_dataset_ids(cdgid, cdgids)
125
+ dsids.append([dsname, rdaids, cdgids])
125
126
 
126
127
  if not dsids: PgLOG.pglog("No Dataset Id identified to gather CDG metrics", PgLOG.LOGWRN)
127
128
 
128
129
  return dsids
129
130
 
130
131
  #
131
- # get dsids recursivley
132
+ # get cdgids recursively
132
133
  #
133
- def recursive_dataset_ids(pdsid, rdaid, dsids):
134
+ def recursive_dataset_ids(pcdgid, cdgids):
134
135
 
135
136
  tbname = 'metadata.dataset'
136
- pgrecs = PgDBI.pgmget(tbname, 'id', "parent_dataset_id = '{}'".format(pdsid))
137
+ pgrecs = PgDBI.pgmget(tbname, 'id', "parent_dataset_id = '{}'".format(pcdgid))
137
138
  if not pgrecs: return
138
139
 
139
- for dsid in pgrecs['id']:
140
- if dsid in dsids: continue
141
- dsids.append([dsid, rdaid])
142
- recursive_dataset_ids(dsid, rdaid, dsids)
140
+ for cdgid in pgrecs['id']:
141
+ if cdgid in cdgids: continue
142
+ cdgids.append(cdgid)
143
+ recursive_dataset_ids(cdgid, cdgids)
143
144
 
144
145
  #
145
146
  # get the date ranges for given condition
146
147
  #
147
- def get_date_ranges(option, inputs):
148
+ def get_date_ranges(inputs):
148
149
 
149
150
  dranges = []
150
- for input in inputs:
151
- # get date range
152
- dates = []
153
- if option == 'N':
154
- dates[1] = USAGE['CDATE']
155
- dates[0] = PgUtil.adddate(USAGE['CDATE'], 0, 0, -int(input))
156
- elif option == 'm':
157
- tms = input.split('-')
158
- dates[0] = PgUtil.fmtdate(int(tms[0]), int(tms[1]), 1)
159
- dates[1] = PgUtil.enddate(dates[0])
160
- else:
161
- dates[0] = input + "-01-01"
162
- dates[1] = input + "-12-31"
163
- dranges.append(dates)
151
+ for opt in inputs:
152
+ for input in inputs[opt]:
153
+ # get date range
154
+ dates = []
155
+ if opt == 'N':
156
+ dates.append(PgUtil.adddate(USAGE['CDATE'], 0, 0, -int(input)))
157
+ dates.append(USAGE['CDATE'])
158
+ elif opt == 'm':
159
+ tms = input.split('-')
160
+ dates.append(PgUtil.fmtdate(int(tms[0]), int(tms[1]), 1))
161
+ dates.append(PgUtil.enddate(dates[0], 0, 'M'))
162
+ elif opt == 'Y':
163
+ dates.append(input + "-01-01")
164
+ dates.append(input + "-12-31")
165
+ dranges.append(dates)
164
166
 
165
167
  return dranges
166
168
 
167
169
  #
168
170
  # get file download records for given dsid
169
171
  #
170
- def get_dsid_records(dsid, dates):
172
+ def get_dsid_records(cdgids, dates, srdaid):
171
173
 
172
174
  gdex_dbname()
173
175
  tbname = 'metrics.file_download'
174
176
  fields = ('date_completed, remote_address, logical_file_size, logical_file_name, file_access_point_uri, user_agent_name, bytes_sent, '
175
177
  'subset_file_size, range_request, dataset_file_size, dataset_file_name, dataset_file_file_access_point_uri')
176
- cond = "dataset_id = '{}' AND completed = True AND date_completed BETWEEN '{}' AND '{}' ORDER BY date_completed".format(dsid, dates[0], dates[1])
178
+ dscnt = len(cdgids)
179
+ dscnd = "dataset_id "
180
+ if dscnt == 1:
181
+ dscnd += "= '{}'".format(cdgids[0])
182
+ else:
183
+ dscnd += "IN ('" + "','".join(cdgids) + "')"
184
+ dtcnd = "date_completed BETWEEN '{}' AND '{}'".format(dates[0], dates[1])
185
+ cond = "{} AND completed = True AND {} ORDER BY date_completed".format(dscnd, dtcnd)
186
+ PgLOG.pglog("{}: CDG query for {} at {}".format(srdaid, cond, PgLOG.current_datetime()), PgLOG.LOGWRN)
177
187
  pgrecs = PgDBI.pgmget(tbname, fields, cond)
178
188
  PgDBI.dssdb_dbname()
179
189
 
@@ -184,17 +194,20 @@ def get_dsid_records(dsid, dates):
184
194
  #
185
195
  def fill_cdg_usages(dsids, dranges):
186
196
 
187
- allcnt = awcnt = atcnt = 0
188
- for dsid in dsids:
189
- cdgid = dsid[0]
190
- rdaid = dsid[1]
191
- for dates in dranges:
192
- pgrecs = get_dsid_records(cdgid, dates)
197
+ allcnt = awcnt = atcnt = lcnt = 0
198
+ for dates in dranges:
199
+ for dsid in dsids:
200
+ lcnt += 1
201
+ dsname = dsid[0]
202
+ rdaids = dsid[1]
203
+ cdgids = dsid[2]
204
+ srdaid = '|'.join(rdaids)
205
+ pgrecs = get_dsid_records(cdgids, dates, srdaid)
193
206
  pgcnt = len(pgrecs['dataset_file_name']) if pgrecs else 0
194
207
  if pgcnt == 0:
195
- PgLOG.pglog("{}: No record found to gather CDG usage between {} and {}".format(rdaid, dates[0], dates[1]), PgLOG.LOGWRN)
208
+ PgLOG.pglog("{}: No record found to gather CDG usage between {} and {}".format(srdaid, dates[0], dates[1]), PgLOG.LOGWRN)
196
209
  continue
197
- PgLOG.pglog("{}: gather {} records for CDG usage between {} and {}".format(rdaid, pgcnt, dates[0], dates[1]), PgLOG.LOGWRN)
210
+ PgLOG.pglog("{}: Process {} records for CDG usage at {}".format(srdaid, pgcnt, PgLOG.current_datetime()), PgLOG.LOGWRN)
198
211
  tcnt = wcnt = 0
199
212
  pwkey = wrec = cdate = None
200
213
  trecs = {}
@@ -210,6 +223,11 @@ def fill_cdg_usages(dsids, dranges):
210
223
  if not url: url = pgrec['file_access_point_uri']
211
224
  ip = pgrec['remote_address']
212
225
  engine = pgrec['user_agent_name']
226
+ wfile = pgrec['dataset_file_name']
227
+ if not wfile: wfile = pgrec['logic_file_name']
228
+ wfrec = get_wfile_record(rdaids, wfile)
229
+ if not wfrec: continue
230
+ dsid = wfrec['dsid']
213
231
  ms = re.search(r'^https://tds.ucar.edu/thredds/(\w+)/', url)
214
232
  if ms:
215
233
  # tds usage
@@ -226,22 +244,24 @@ def fill_cdg_usages(dsids, dranges):
226
244
  tcnt += add_tdsusage_records(year, trecs, cdate)
227
245
  trecs = {}
228
246
  cdate = date
229
- tkey = "{}:{}:{}:{}".format(ip, rdaid, method, etype)
247
+ tkey = "{}:{}:{}:{}".format(ip, dsid, method, etype)
230
248
  if tkey in trecs:
231
249
  trecs[tkey]['size'] += dsize
232
250
  trecs[tkey]['fcount'] += 1
233
251
  else:
234
- trecs[tkey] = {'ip' : ip, 'dsid' : rdaid, 'date' : cdate, 'time' : time, 'size' : dsize,
235
- 'fcount' : 1, 'method' : method, 'etype' : etype, 'engine' : engine}
252
+ wurec = get_wuser_record(ip)
253
+ if not wurec: return 0
254
+ trecs[tkey] = {'ip' : ip, 'dsid' : dsid, 'date' : cdate, 'time' : time, 'size' : dsize,
255
+ 'fcount' : 1, 'method' : method, 'etype' : etype, 'engine' : engine,
256
+ 'org_type' : wurec['org_type'], 'country' : wurec['country'],
257
+ 'email' : wurec['email']}
236
258
  else:
237
259
  # web usage
238
- wfile = pgrec['dataset_file_name']
239
- if not wfile: wfile = pgrec['logic_file_name']
240
260
  fsize = pgrec['dataset_file_size']
241
261
  if not fsize: fsize = pgrec['logic_file_size']
242
262
  method = 'CDP'
243
263
  if pgrec['subset_file_size'] or pgrec['range_request'] or dsize < fsize:
244
- wkey = "{}:{}:{}".format(ip, rdaid, wfile)
264
+ wkey = "{}:{}:{}".format(ip, dsid, wfile)
245
265
  else:
246
266
  wkey = None
247
267
 
@@ -250,7 +270,7 @@ def fill_cdg_usages(dsids, dranges):
250
270
  wrec['size'] += dsize
251
271
  continue
252
272
  wcnt += add_webfile_usage(year, wrec)
253
- wrec = {'ip' : ip, 'dsid' : rdaid, 'wfile' : wfile, 'date' : date,
273
+ wrec = {'ip' : ip, 'dsid' : dsid, 'wid' : wfrec['wid'], 'date' : date,
254
274
  'time' : time, 'quarter' : quarter, 'size' : dsize,
255
275
  'locflag' : 'C', 'method' : method}
256
276
  pwkey = wkey
@@ -263,9 +283,7 @@ def fill_cdg_usages(dsids, dranges):
263
283
  atcnt += tcnt
264
284
  awcnt += wcnt
265
285
  allcnt += pgcnt
266
-
267
- PgLOG.pglog("{}/{} TDS/WEB usage records added for {} CDG entries at {}".format(atcnt, awcnt, allcnt, PgLOG.current_datetime()), PgLOG.LOGWRN)
268
-
286
+ PgLOG.pglog("{}/{} TDS/WEB usage records added for {} CDG entries at {}".format(atcnt, awcnt, allcnt, PgLOG.current_datetime()), PgLOG.LOGWRN)
269
287
 
270
288
  def get_record_date_time(ctime):
271
289
 
@@ -287,12 +305,6 @@ def add_tdsusage_records(year, records, date):
287
305
  record = records[key]
288
306
  cond = "date = '{}' AND time = '{}' AND ip = '{}'".format(date, record['time'], record['ip'])
289
307
  if PgDBI.pgget(USAGE['TDSTBL'], '', cond, PgLOG.LGEREX): continue
290
- record['org_type'] = record['country'] = '-'
291
- ipinfo = PgIPInfo.set_ipinfo(record['ip'])
292
- if ipinfo:
293
- record['org_type'] = ipinfo['org_type']
294
- record['country'] = ipinfo['country']
295
- record['email'] = 'unknown@' + ipinfo['hostname']
296
308
 
297
309
  if add_tds_allusage(year, record):
298
310
  cnt += PgDBI.pgadd(USAGE['TDSTBL'], record, PgLOG.LOGWRN)
@@ -301,39 +313,43 @@ def add_tdsusage_records(year, records, date):
301
313
 
302
314
  return cnt
303
315
 
316
+ def add_tds_allusage(year, logrec):
304
317
 
305
- def add_tds_allusage(year, pgrec):
306
-
307
- record = {'method' : 'CDP', 'source' : 'C'}
308
-
309
- for fld in pgrec:
310
- if re.match(r'^(engine|method|etype|fcount)$', fld): continue
311
- record[fld] = pgrec[fld]
318
+ pgrec = {'method' : 'CDP', 'source' : 'C'}
319
+ pgrec['email'] = logrec['email']
320
+ pgrec['org_type'] = logrec['org_type']
321
+ pgrec['country'] = logrec['country']
322
+ pgrec['dsid'] = logrec['dsid']
323
+ pgrec['date'] = logrec['date']
324
+ pgrec['quarter'] = logrec['quarter']
325
+ pgrec['time'] = logrec['time']
326
+ pgrec['size'] = logrec['size']
327
+ pgrec['ip'] = logrec['ip']
328
+ return PgDBI.add_yearly_allusage(year, pgrec)
312
329
 
313
- return PgDBI.add_yearly_allusage(year, record)
314
330
 
315
331
  #
316
332
  # Fill usage of a single online data file into table dssdb.wusage of DSS PgSQL database
317
333
  #
318
334
  def add_webfile_usage(year, logrec):
319
335
 
320
- pgrec = get_wfile_wid(logrec['dsid'], logrec['wfile'])
321
- if not pgrec: return 0
322
-
323
336
  table = "{}_{}".format(USAGE['WEBTBL'], year)
324
- cond = "wid = {} AND method = '{}' AND date_read = '{}' AND time_read = '{}'".format(pgrec['wid'], logrec['method'], logrec['date'], logrec['time'])
337
+ cdate = logrec['date']
338
+ ip = logrec['ip']
339
+ cond = "wid = {} AND method = '{}' AND date_read = '{}' AND time_read = '{}'".format(logrec['wid'], logrec['method'], cdate, logrec['time'])
325
340
  if PgDBI.pgget(table, "", cond, PgLOG.LOGWRN): return 0
326
341
 
327
- wurec = get_wuser_record(logrec['ip'], logrec['date'])
342
+ wurec = get_wuser_record(ip, cdate, False)
328
343
  if not wurec: return 0
329
- record = {'wid' : pgrec['wid'], 'dsid' : pgrec['dsid']}
344
+
345
+ record = {'wid' : logrec['wid'], 'dsid' : logrec['dsid']}
330
346
  record['wuid_read'] = wurec['wuid']
331
- record['date_read'] = logrec['date']
347
+ record['date_read'] = cdate
332
348
  record['time_read'] = logrec['time']
333
349
  record['size_read'] = logrec['size']
334
350
  record['method'] = logrec['method']
335
351
  record['locflag'] = logrec['locflag']
336
- record['ip'] = logrec['ip']
352
+ record['ip'] = ip
337
353
  record['quarter'] = logrec['quarter']
338
354
 
339
355
  if add_web_allusage(year, logrec, wurec):
@@ -343,7 +359,10 @@ def add_webfile_usage(year, logrec):
343
359
 
344
360
  def add_web_allusage(year, logrec, wurec):
345
361
 
346
- pgrec = {'email' : wurec['email'], 'org_type' : wurec['org_type'], 'country' : wurec['country']}
362
+ pgrec = {'source' : 'C'}
363
+ pgrec['email'] = wurec['email']
364
+ pgrec['org_type'] = wurec['org_type']
365
+ pgrec['country'] = wurec['country']
347
366
  pgrec['dsid'] = logrec['dsid']
348
367
  pgrec['date'] = logrec['date']
349
368
  pgrec['quarter'] = logrec['quarter']
@@ -351,36 +370,54 @@ def add_web_allusage(year, logrec, wurec):
351
370
  pgrec['size'] = logrec['size']
352
371
  pgrec['method'] = logrec['method']
353
372
  pgrec['ip'] = logrec['ip']
354
- pgrec['source'] = 'C'
355
373
  return PgDBI.add_yearly_allusage(year, pgrec)
356
374
 
357
375
  #
358
376
  # return wfile.wid upon success, 0 otherwise
359
377
  #
360
- def get_wfile_wid(dsid, wfile):
378
+ def get_wfile_record(dsids, wfile):
361
379
 
362
- wfcond = "wfile = '{}'".format(wfile)
363
- pgrec = PgSplit.pgget_wfile(dsid, "*", wfcond)
364
- if pgrec:
365
- pgrec['dsid'] = dsid
366
- else:
367
- pgrec = PgDBI.pgget("wfile_delete", "*", "{} AND dsid = '{}'".format(wfcond, dsid))
368
- if not pgrec:
369
- pgrec = PgDBI.pgget("wmove", "wid, dsid", wfcond)
370
- if pgrec:
371
- pgrec = PgSplit.pgget_wfile(pgrec['dsid'], "*", "wid = {}".format(pgrec['wid']))
372
- if pgrec: pgrec['dsid'] = dsid
380
+ for dsid in dsids:
381
+ wkey = "{}{}".format(dsid, wfile)
382
+ if wkey in WFILES: return WFILES[wkey]
383
+ wfcond = "wfile LIKE '%{}'".format(wfile)
384
+ pgrec = None
385
+ for dsid in dsids:
386
+ pgrec = PgSplit.pgget_wfile(dsid, "wid", wfcond)
387
+ if pgrec:
388
+ pgrec['dsid'] = dsid
389
+ wkey = "{}{}".format(dsid, wfile)
390
+ WFILES[wkey] = pgrec
391
+ return pgrec
373
392
 
393
+ for dsid in dsids:
394
+ pgrec = PgDBI.pgget("wfile_delete", "wid, dsid", "{} AND dsid = '{}'".format(wfcond, dsid))
395
+ if not pgrec:
396
+ mvrec = PgDBI.pgget("wmove", "wid, dsid", wfcond)
397
+ if mvrec:
398
+ pgrec = PgSplit.pgget_wfile(mvrec['dsid'], "wid", "wid = {}".format(pgrec['wid']))
399
+ if pgrec: pgrec['dsid'] = mvrec['dsid']
400
+
401
+ if pgrec:
402
+ wkey = "{}{}".format(pgrec['dsid'], wfile)
403
+ WFILES[wkey] = pgrec
374
404
  return pgrec
375
405
 
376
406
  # return wuser record upon success, None otherwise
377
- def get_wuser_record(ip, date):
407
+ def get_wuser_record(ip, date = None, skipwuid = True):
408
+
409
+ if ip in WUSERS: return WUSERS[ip]
378
410
 
379
411
  ipinfo = PgIPInfo.set_ipinfo(ip)
380
412
  if not ipinfo: return None
381
413
 
382
414
  record = {'org_type' : ipinfo['org_type'], 'country' : ipinfo['country']}
383
415
  email = 'unknown@' + ipinfo['hostname']
416
+ if skipwuid:
417
+ record['email'] = email
418
+ WUSERS[ip] = record
419
+ return record
420
+
384
421
  emcond = "email = '{}'".format(email)
385
422
  flds = 'wuid, email, org_type, country, start_date'
386
423
  pgrec = PgDBI.pgget("wuser", flds, emcond, PgLOG.LOGERR)
@@ -388,6 +425,7 @@ def get_wuser_record(ip, date):
388
425
  if PgUtil.diffdate(pgrec['start_date'], date) > 0:
389
426
  pgrec['start_date'] = record['start_date'] = date
390
427
  PgDBI.pgupdt('wuser', record, emcond)
428
+ WUSERS[ip] = pgrec
391
429
  return pgrec
392
430
 
393
431
  # now add one in
@@ -398,6 +436,7 @@ def get_wuser_record(ip, date):
398
436
  if wuid:
399
437
  record['wuid'] = wuid
400
438
  PgLOG.pglog("{} Added as wuid({})".format(email, wuid), PgLOG.LGWNEM)
439
+ WUSERS[ip] = record
401
440
  return record
402
441
 
403
442
  return None
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: rda_python_metrics
3
- Version: 1.0.9
3
+ Version: 1.0.11
4
4
  Summary: RDA Python Package to gather and view data usage metrics
5
5
  Author-email: Zaihua Ji <zji@ucar.edu>
6
6
  Project-URL: Homepage, https://github.com/NCAR/rda-python-metrics
@@ -3,7 +3,7 @@ rda_python_metrics/PgView.py,sha256=r6otb3DjfiaQJdg0z8bZQAOlhr4JnrXJzp9wgWh_8qQ,
3
3
  rda_python_metrics/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
4
4
  rda_python_metrics/fillawsusage.py,sha256=dpI3-EFINJECdCSUOm37A97CJNIxOS2nYwA0fB0IpHE,9055
5
5
  rda_python_metrics/fillawsusage.usg,sha256=pD_nYTfg1so9nvVEyPSWdgKvb9gWdtfHJAs3RsT2MMU,609
6
- rda_python_metrics/fillcdgusage.py,sha256=9sDFLAhzndjAVCahIVHofQ4YeslwW7MGIWEDSAQYdqY,13816
6
+ rda_python_metrics/fillcdgusage.py,sha256=D4PhdVyGoISSr0ykNIpmHVMpEAfRZQB9BvaXdYMcGno,15186
7
7
  rda_python_metrics/fillcdgusage.usg,sha256=5lkd4Zdi72nQcha-JtbaLnxl66V4wBDykKwQtUOtMrw,667
8
8
  rda_python_metrics/fillcodusage.py,sha256=Hp3VrlIqHBWRI6Zjbi0nxWZPNvPrKsGBSZ1L9qA9_y0,8006
9
9
  rda_python_metrics/fillcodusage.usg,sha256=3B5IkQ4uwylqY8uEfUdnZ_MXqhYudeylMp5ulhUGXH8,678
@@ -44,9 +44,9 @@ rda_python_metrics/viewwebfile.py,sha256=HSMNkQQawonu6W3blV7g9UbJuNy9VAOn9COqgmj
44
44
  rda_python_metrics/viewwebfile.usg,sha256=lTNi8Yu8BUJuExEDJX-vsJyWUSUIQTS-DiiBEVFo33s,10054
45
45
  rda_python_metrics/viewwebusage.py,sha256=jhoHuRPVNtp7Lbjv0l-Jy_vp2p2nWQC7IVZ0P4JUJ4I,16657
46
46
  rda_python_metrics/viewwebusage.usg,sha256=IsT72v22xyZf7ng_IodVs0dLTsH1Q4BtFvT-gs0-xJY,9946
47
- rda_python_metrics-1.0.9.dist-info/licenses/LICENSE,sha256=1dck4EAQwv8QweDWCXDx-4Or0S8YwiCstaso_H57Pno,1097
48
- rda_python_metrics-1.0.9.dist-info/METADATA,sha256=k5xThxsVVIf1-uVtr5NDQ0VdUPKjZxeNlgSNysXLT24,735
49
- rda_python_metrics-1.0.9.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
50
- rda_python_metrics-1.0.9.dist-info/entry_points.txt,sha256=YfFLzlE3rdufSV471VsDnfYptnt1lR08aSrxPXlKqlY,1185
51
- rda_python_metrics-1.0.9.dist-info/top_level.txt,sha256=aoBgbR_o70TP0QmMW0U6inRHYtfKld47OBmnWnLnDOs,19
52
- rda_python_metrics-1.0.9.dist-info/RECORD,,
47
+ rda_python_metrics-1.0.11.dist-info/licenses/LICENSE,sha256=1dck4EAQwv8QweDWCXDx-4Or0S8YwiCstaso_H57Pno,1097
48
+ rda_python_metrics-1.0.11.dist-info/METADATA,sha256=2D84_3Vy1MxxzM--KjIjJmFitL4QJ0JwkuigW0CErik,736
49
+ rda_python_metrics-1.0.11.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
50
+ rda_python_metrics-1.0.11.dist-info/entry_points.txt,sha256=YfFLzlE3rdufSV471VsDnfYptnt1lR08aSrxPXlKqlY,1185
51
+ rda_python_metrics-1.0.11.dist-info/top_level.txt,sha256=aoBgbR_o70TP0QmMW0U6inRHYtfKld47OBmnWnLnDOs,19
52
+ rda_python_metrics-1.0.11.dist-info/RECORD,,