rda-python-metrics 1.0.6__py3-none-any.whl → 1.0.8__py3-none-any.whl

This diff shows the content changes between publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the package versions as they appear in their respective public registries.

Potentially problematic release.


This version of rda-python-metrics might be problematic. Refer to the registry's advisory page for more details.

@@ -1,4 +1,4 @@
1
- #!/usr/bin/env python3
1
+ #!/usr/bin/env python3/
2
2
  #
3
3
  ###############################################################################
4
4
  #
@@ -17,7 +17,6 @@
17
17
  import sys
18
18
  import re
19
19
  from rda_python_common import PgLOG
20
- from rda_python_common import PgIMMA
21
20
  from rda_python_common import PgUtil
22
21
  from rda_python_common import PgDBI
23
22
 
@@ -29,7 +28,7 @@ def main():
29
28
  argv = sys.argv[1:]
30
29
  tables = ['allusage', 'user', 'wuser']
31
30
  table = None
32
-
31
+
33
32
  # check command line
34
33
  for arg in argv:
35
34
  if arg == "-b":
@@ -40,7 +39,7 @@ def main():
40
39
  table = arg
41
40
  else:
42
41
  PgLOG.pglog(arg + ": one table name at a time", PgLOG.LGEREX)
43
-
42
+
44
43
  if not table:
45
44
  print("Usage: fillcountry TableName\n")
46
45
  sys.exit(0)
@@ -51,7 +50,7 @@ def main():
51
50
  PgLOG.cmdlog("fillcountry {}".format(' '.join(argv)))
52
51
 
53
52
  process_countries(table)
54
-
53
+
55
54
  sys.exit(0)
56
55
 
57
56
  def process_countries(table):
@@ -17,12 +17,11 @@ import sys
17
17
  import re
18
18
  from rda_python_common import PgLOG
19
19
  from rda_python_common import PgDBI
20
- from rda_python_common import PgIMMA
21
20
  from rda_python_common import PgUtil
22
21
 
23
22
  # -t dsid, -e email, -v request data volume, -i data input volume,
24
23
  # -m delivery method, -a amount charged, -p pay method, -d request date, -x close date,
25
- # -y close time, -c file count, -s specialist login name, -o order id,
24
+ # -y close time, -c file count, -s specialist login name, -o order id,
26
25
  # mandatory options: -t, -e, -v, and -m
27
26
 
28
27
  #
@@ -63,7 +62,7 @@ def add_one_order(params):
63
62
 
64
63
  year = None
65
64
  record = {}
66
-
65
+
67
66
  record['dsid'] = params['t']
68
67
  record['wuid_request'] = params['u']
69
68
  record['dss_uname'] = params['s']
@@ -100,7 +99,7 @@ def add_to_allusage(record, year, ctime):
100
99
  pgrec['method'] = record['method']
101
100
  pgrec['source'] = 'O'
102
101
  return PgDBI.add_yearly_allusage(year, pgrec)
103
-
102
+
104
103
  return 0
105
104
 
106
105
  #
@@ -23,7 +23,7 @@ from . import PgIPInfo
23
23
 
24
24
  USAGE = {
25
25
  'OSDFTBL' : "wusage",
26
- 'OSDFDIR' : PgLOG.PGLOG["DSSDATA"] + "/work/logs/gridftp/",
26
+ 'OSDFDIR' : PgLOG.PGLOG["DSSDATA"] + "/work/zji/osdflogs/",
27
27
  'OSDFGET' : 'wget -m -nH -np -nd https://pelicanplatform.org/pelican-access-logs/ncar-access-log/',
28
28
  'OSDFLOG' : "{}.log", # YYYY-MM-DD.log
29
29
  }
@@ -95,7 +95,7 @@ def get_log_file_names(option, params, datelimits):
95
95
  else:
96
96
  edate = PgUtil.curdate()
97
97
  pdays = PgUtil.get_weekday(pdate)
98
- if pdays > 0: pdate = PgUtil.adddate(edate, 0, 0, 7-pdays)
98
+ if pdays > 0: pdate = PgUtil.adddate(pdate, 0, 0, 7-pdays)
99
99
  while pdate <= edate:
100
100
  filenames.append(USAGE['OSDFLOG'].format(pdate))
101
101
  pdate = PgUtil.adddate(pdate, 0, 0, 7)
@@ -101,6 +101,8 @@ def fill_rdadb(option, params):
101
101
  PgLOG.pgsystem("fillglobususage {} {}".format(RDADB['BCKGRND'], filecond), PgLOG.LGWNEM, 5)
102
102
  # fill available AWS web data usages
103
103
  PgLOG.pgsystem("fillawsusage {} {}".format(RDADB['BCKGRND'], filecond), PgLOG.LGWNEM, 5)
104
+ # fill available OSDF web data usages
105
+ PgLOG.pgsystem("fillosdfusage {} {}".format(RDADB['BCKGRND'], filecond), PgLOG.LGWNEM, 5)
104
106
 
105
107
  if RDADB['DOMAIL']: send_email_notice()
106
108
  PgLOG.pglog("End Filling RDADB info at {}".format(PgLOG.current_datetime()), PgLOG.LGWNEM)
@@ -23,16 +23,18 @@ from rda_python_common import PgFile
23
23
  from rda_python_common import PgSIG
24
24
 
25
25
  # the defined options for archiving different logs
26
- WLOG = 0x01 # archive web log
26
+ WLOG = 0x21 # archive web log
27
27
  TLOG = 0x02 # archive tds log
28
28
  DLOG = 0x04 # archive dssdb logs
29
29
  SLOG = 0x08 # append dssdb sub batch logs
30
30
  ALOG = 0x10 # archive AWS web log
31
+ OLOG = 0x20 # archive OSDF web log
31
32
 
32
33
  LOGS = {
33
34
  'OPTION' : 0,
34
35
  'AWSLOG' : PgLOG.PGLOG["TRANSFER"] + "/AWSera5log",
35
36
  'WEBLOG' : PgLOG.PGLOG["DSSDATA"] + "/work/logs/gridftp",
37
+ 'OSDFLOG' : PgLOG.PGLOG["DSSDATA"] + "/zji/osdflogs",
36
38
  'MGTLOG' : "/data/logs",
37
39
  'TDSLOG' : "/data/logs/nginx",
38
40
  'RDALOG' : PgLOG.PGLOG['LOGPATH'],
@@ -70,6 +72,8 @@ def main():
70
72
  LOGS['OPTION'] |= WLOG
71
73
  elif option == "a":
72
74
  LOGS['OPTION'] |= ALOG
75
+ elif option == "o":
76
+ LOGS['OPTION'] |= OLOG
73
77
  elif option == "s":
74
78
  LOGS['OPTION'] |= SLOG
75
79
  elif option == "t":
@@ -91,6 +95,7 @@ def main():
91
95
  if LOGS['OPTION']&SLOG: append_dssdb_sublog()
92
96
  if LOGS['OPTION']&DLOG: archive_dssdb_log()
93
97
  if LOGS['OPTION']&WLOG: archive_web_log(smonth)
98
+ if LOGS['OPTION']&OLOG: archive_osdf_log(smonth)
94
99
  if LOGS['OPTION']&ALOG: archive_aws_log(smonth)
95
100
  if LOGS['OPTION']&TLOG: archive_tds_log(smonth)
96
101
 
@@ -142,6 +147,38 @@ def archive_web_log(smonth):
142
147
  s = 's' if tcnt > 1 else ''
143
148
  PgLOG.pglog("{}: {} globus log{} tarred, gzipped and archived at {}".format(afile, tcnt, s, PgLOG.current_datetime()), PgLOG.LGWNEM)
144
149
 
150
+ #
151
+ # Archive OSDF web log files to LOGS['DECSLOGS']
152
+ #
153
+ def archive_osdf_log(smonth):
154
+
155
+ (yr, mn) = get_year_month(smonth)
156
+ PgFile.change_local_directory(LOGS['DECSLOGS'], PgLOG.LGEREM)
157
+ logpath = LOGS['LOGPATH'] if LOGS['LOGPATH'] else LOGS['OSDFLOG']
158
+ afile = "osdfweb{}-{}.log.tar".format(yr, mn)
159
+ dfile = "./OSDFLOG/{}.gz".format(afile)
160
+ if op.exists(dfile):
161
+ PgLOG.pglog("{}: file exists already under {}, remove it before backup again".format(dfile, LOGS['DECSLOGS']), PgLOG.LGWNEM)
162
+ return
163
+
164
+ if op.exists(afile): PgFile.delete_local_file(afile)
165
+
166
+ logfiles = sorted(glob.glob("{}/{}-{}-??.log".format(logpath, yr, mn)))
167
+ topt = '-cvf'
168
+ tcnt = 0
169
+ for logfile in logfiles:
170
+ lfile = op.basename(logfile)
171
+ tcmd = "tar {} {} -C {} {}".format(topt, afile, logpath, lfile)
172
+ tcnt += PgLOG.pgsystem(tcmd, PgLOG.LGWNEM, 5)
173
+ topt = '-uvf'
174
+
175
+ if tcnt > 0:
176
+ PgLOG.pgsystem("gzip " + afile, PgLOG.LGWNEM, 5)
177
+ afile += '.gz'
178
+ PgFile.move_local_file(dfile, afile, PgLOG.LGWNEM)
179
+ s = 's' if tcnt > 1 else ''
180
+ PgLOG.pglog("{}: {} globus log{} tarred, gzipped and archived at {}".format(afile, tcnt, s, PgLOG.current_datetime()), PgLOG.LGWNEM)
181
+
145
182
  #
146
183
  # Archive AWS web log files to LOGS['DECSLOGS']
147
184
  #
@@ -3,7 +3,7 @@
3
3
  at any time. It checks if the given log files are archived or not and will
4
4
  ignore the ones that are backed up already.
5
5
 
6
- Usage: logarch [-b] [-a] [-d] [-w] [-s] [-t] [-p LogPath] \
6
+ Usage: logarch [-b] [-a] [-d] [-o] [-w] [-s] [-t] [-p LogPath] \
7
7
  [-m YearMonth] [-n]
8
8
 
9
9
  - Option -b, do not display processing info on screen;
@@ -12,6 +12,8 @@
12
12
 
13
13
  - Option -d, archive dssdb log files;
14
14
 
15
+ - Option -o, archive OSDF log files;
16
+
15
17
  - Option -p, provide a LogPath if not the fefault one;
16
18
 
17
19
  - Option -s, append PBS sublog files to common log files;
@@ -0,0 +1,36 @@
1
+ #
2
+ ##################################################################################
3
+ #
4
+ # Title: pgsyspath
5
+ # Author: Zaihua Ji, zji@ucar.edu
6
+ # Date: 10/21/2020
7
+ # Purpose: python script to set sys.path properly to include paths for local
8
+ # modules
9
+ #
10
+ # Work File: $DSSHOME/bin/python/pgsyspath.py*
11
+ # Github: https://github.com/NCAR/rda-utility-programs.git
12
+ #
13
+ ##################################################################################
14
+
15
+ import sys
16
+ import os
17
+ import re
18
+
19
+ #
20
+ # intinialize the sys.path to include paths for local modules
21
+ #
22
+ def include_local_paths():
23
+
24
+ rpath = '/glade/u/home/rdadata'
25
+ if re.match('PG', rpath): rpath = os.getenv('DSSHOME', '/glade/u/home/rdadata')
26
+
27
+ # add more path to pgpaths list as needed
28
+ pgpaths = [rpath + '/lib/python',
29
+ rpath + '/lib/python/site-packages']
30
+ for pgpath in pgpaths:
31
+ if pgpath not in sys.path: sys.path.insert(0, pgpath)
32
+
33
+ #
34
+ # call to include local paths when this module is imported or run independently
35
+ #
36
+ include_local_paths()
@@ -14,11 +14,11 @@
14
14
  import os
15
15
  import re
16
16
  import sys
17
- import pgsyspath
18
- import PgLOG
19
- import PgUtil
20
- import PgView
21
- import PgDBI
17
+ from . import pgsyspath
18
+ from . import PgView
19
+ from rda_python_common import PgLOG
20
+ from rda_python_common import PgUtil
21
+ from rda_python_common import PgDBI
22
22
 
23
23
  VUSG = {
24
24
  'SNMS' : "ABCDEFGHIJKLMNOPQRSTVWYZ", # all available short field names in FLDS
@@ -38,7 +38,7 @@ VUSG = {
38
38
  # column 0 - column title showing in usage view
39
39
  # column 1 - field name in format as shown in select clauses
40
40
  # column 2 - field name shown in where condition query string
41
- # column 3 - table name that the field belongs to
41
+ # column 3 - table name that the field belongs to
42
42
  # column 4 - output field length, the longer one of data size and comlun title, determine
43
43
  # dynamically if it is 0. Negative values indicate right justification
44
44
  # column 5 - precision for floating point value if positive and show total value if not zero
@@ -76,7 +76,7 @@ FLDS = {
76
76
  # column 0 - expand ID for group of fields
77
77
  # column 1 - field name shown in where condition query string
78
78
  # column 2 - field name in format as shown in select clauses
79
- # column 3 - table name that the field belongs to
79
+ # column 3 - table name that the field belongs to
80
80
  EXPAND = {
81
81
  # SHRTNM EXPID CNDSTR FIELDNAME TBLNAM
82
82
  'D' : ["TIME", "dDmy"],
@@ -105,7 +105,7 @@ EXPAND = {
105
105
  # A -- number or records to return
106
106
  # c -- array of specified country codes
107
107
  # C -- a string of short field names for viewing usages
108
- # d -- array of specified dates
108
+ # d -- array of specified dates
109
109
  # D -- dates range, array of 1 or 2 dates in format of YYYY-MM-DD
110
110
  # e -- array of specified email addresses
111
111
  # E -- use given date or date range for email notice of data update
@@ -116,13 +116,13 @@ EXPAND = {
116
116
  # j -- array of specified projects
117
117
  # l -- array of specified last names
118
118
  # L -- column delimiter for output
119
- # m -- array of specified months
120
- # M -- array of specified download methods
121
- # n -- array of specified order numbers
119
+ # m -- array of specified months
120
+ # M -- array of specified download methods
121
+ # n -- array of specified order numbers
122
122
  # N -- number request range, arrage of 1 or 2 integers
123
123
  # o -- array of specified orginization types
124
124
  # O -- a string of short field names for sorting on
125
- # p -- array of specified payment methods
125
+ # p -- array of specified payment methods
126
126
  # s -- output data size range, arrage of 1 or 2 sizes in unit of MByte
127
127
  # S -- array of login names of specialists who processed the orders
128
128
  # t -- array of specified dataset names
@@ -130,12 +130,12 @@ EXPAND = {
130
130
  # U -- use given unit for file or data sizes
131
131
  # v -- aray of specified roder numbers
132
132
  # w -- generate view without totals
133
- # y -- array of specified years
133
+ # y -- array of specified years
134
134
  # z -- generate view including entries without usage
135
135
  params = {}
136
136
 
137
137
  # relationship between parameter options and short field names, A option is not
138
- # related to a field name if it is not in keys %SNS
138
+ # related to a field name if it is not in keys %SNS
139
139
  SNS = {
140
140
  'c' : 'N', 'd' : 'D', 'D' : 'D', 'e' : 'E', 'g' : 'G', 'i' : 'I', 'j' : 'J',
141
141
  'l' : 'L', 'm' : 'M', 'M' : 'W', 'n' : 'V', 'N' : 'H', 'o' : 'O', 'p' : 'K',
@@ -194,18 +194,18 @@ def main():
194
194
  params[option] = 1
195
195
  elif inputs:
196
196
  params[option] = inputs # record input array
197
-
197
+
198
198
  if not params:
199
199
  PgLOG.show_usage(pgname)
200
200
  else:
201
201
  check_enough_options()
202
-
202
+
203
203
  if 'o' not in params:
204
204
  if 'e' not in params:
205
205
  params['o'] = ['!', "'DSS'"] # default to exclude 'DSS' for organization
206
206
  elif params['o'][0] == "'ALL'":
207
207
  del params['o']
208
-
208
+
209
209
  usgtable = "ousage"
210
210
  build_query_strings(usgtable) # build tablenames, fieldnames, and conditions
211
211
  records = PgDBI.pgmget(tablenames, fieldnames, condition, PgLOG.UCLWEX)
@@ -16,9 +16,9 @@
16
16
  import os
17
17
  import re
18
18
  import sys
19
- import PgLOG
20
- import PgUtil
21
- import PgDBI
19
+ from rda_python_common import PgLOG
20
+ from rda_python_common import PgUtil
21
+ from rda_python_common import PgDBI
22
22
  from . import PgView
23
23
 
24
24
  FILE = {
@@ -38,9 +38,9 @@ FILE = {
38
38
  # column 0 - column title showing in mss file view
39
39
  # column 1 - field name in format as shown in select clauses
40
40
  # column 2 - field name shown in where condition query string
41
- # column 3 - table name that the field belongs to
41
+ # column 3 - table name that the field belongs to
42
42
  # column 4 - output field length, the longer one of data size and comlun title, determine
43
- # dynamically if it is 0. Negative values indicate right justification
43
+ # dynamically if it is 0. Negative values indicate right justification
44
44
  # column 5 - precision for floating point value if positive and show total value if not zero
45
45
  # column 6 - field flag to indicate it is a group, distinct or sum field
46
46
  FLDS = {
@@ -77,7 +77,7 @@ FLDS = {
77
77
  # a -- 1 to view all usage info available
78
78
  # A -- number or records to return
79
79
  # C -- a string of short field names for viewing usages
80
- # d -- array of specified dates of file last written
80
+ # d -- array of specified dates of file last written
81
81
  # D -- last written dates range, array of 1 or 2 dates in format of YYYY-MM-DD
82
82
  # e -- array of specified email addresses
83
83
  # f -- array of specified online file names
@@ -90,16 +90,16 @@ FLDS = {
90
90
  # J -- 1 to include group title for GROUP
91
91
  # l -- array of specified last names
92
92
  # L -- column delimiter for output
93
- # m -- array of specified months of file last written
94
- # M -- array of specified months of file created
95
- # n -- array of specified user numbers
93
+ # m -- array of specified months of file last written
94
+ # M -- array of specified months of file created
95
+ # n -- array of specified user numbers
96
96
  # D -- dates range, datasets created between, array of 1 or 2 dates in format of YYYY-MM-DD
97
97
  # o -- array of specified file status
98
98
  # O -- a string of short field names for sorting on
99
99
  # p -- array of web file types, Data, Document, and etc.
100
100
  # r -- group index range, array of 1 or 2 group indices
101
101
  # s -- file size range, arrage of 1 or 2 sizes in unit of MByte
102
- # S -- specialist lognames who handle the datasets
102
+ # S -- specialist lognames who handle the datasets
103
103
  # t -- array of specified dataset names
104
104
  # T -- dataset range, array of 1 or 2 dataset names
105
105
  # u -- array of specified specialist user names
@@ -113,7 +113,7 @@ FLDS = {
113
113
  params = {}
114
114
 
115
115
  # relationship between parameter options and short field names, A option is not
116
- # related to a field name if it is not in keys %SNS
116
+ # related to a field name if it is not in keys %SNS
117
117
  SNS = {
118
118
  'd' : 'D', 'D' : 'D', 'e' : 'E', 'f' : 'F', 'F' : 'F', 'g' : 'G', 'i' : 'I',
119
119
  'l' : 'L', 'm' : 'M', 'M' : 'H', 'N' : 'N', 'o' : 'O', 'p' : 'P', 'r' : 'G',
@@ -174,7 +174,7 @@ def main():
174
174
  params[option] = 1
175
175
  elif inputs:
176
176
  params[option] = inputs # record input array
177
-
177
+
178
178
  if not params:
179
179
  PgLOG.show_usage(pgname)
180
180
  else:
@@ -196,7 +196,7 @@ def main():
196
196
  PgView.simple_output(params, FLDS, records, totals)
197
197
 
198
198
  PgLOG.pgexit(0)
199
-
199
+
200
200
  #
201
201
  # cehck if enough information entered on command line for generate view/report, exit if not
202
202
  #
@@ -209,7 +209,7 @@ def check_enough_options():
209
209
  if sn == 'X': continue # do not process INDEX field
210
210
  if FILE['SNMS'].find(sn) < 0:
211
211
  PgLOG.pglog("{}: Field {} must be in field names '{}X'".format(pgname, sn, FILE['SNMS']), PgLOG.LGWNEX)
212
-
212
+
213
213
  if 'g' in params or 'G' in params:
214
214
  if 't' not in params:
215
215
  PgLOG.pglog("Miss dataset condition via Option -t for processing Group", PgLOG.LGWNEX)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: rda_python_metrics
3
- Version: 1.0.6
3
+ Version: 1.0.8
4
4
  Summary: RDA Python Package to gather and view data usage metrics
5
5
  Author-email: Zaihua Ji <zji@ucar.edu>
6
6
  Project-URL: Homepage, https://github.com/NCAR/rda-python-metrics
@@ -13,6 +13,9 @@ Description-Content-Type: text/markdown
13
13
  License-File: LICENSE
14
14
  Requires-Dist: rda_python_common
15
15
  Requires-Dist: rda_python_setuid
16
+ Requires-Dist: geoip2
17
+ Requires-Dist: ipinfo
18
+ Requires-Dist: httplib2
16
19
  Dynamic: license-file
17
20
 
18
21
  RDA Python Package to gather and view data usage metrics.
@@ -5,25 +5,26 @@ rda_python_metrics/fillawsusage.py,sha256=dpI3-EFINJECdCSUOm37A97CJNIxOS2nYwA0fB
5
5
  rda_python_metrics/fillawsusage.usg,sha256=qHdQGMbXlbsqEAoEmM71_zlE3yw704nyhXZ4sw4FIbI,631
6
6
  rda_python_metrics/fillcodusage.py,sha256=Hp3VrlIqHBWRI6Zjbi0nxWZPNvPrKsGBSZ1L9qA9_y0,8006
7
7
  rda_python_metrics/fillcodusage.usg,sha256=QAM6wqycyDb-6t5PhlL2niPiGxZ4Gln0QVdKdyN3ShI,684
8
- rda_python_metrics/fillcountry.py,sha256=YYm0lIIfljA7rIAOFgP1fhyhqkBDOY6vdk7g11WFPLc,2359
8
+ rda_python_metrics/fillcountry.py,sha256=7i5LNi3scRoyRCT6t7aeNTGKOpxzJ2mA9tnvUqje2AU,2314
9
9
  rda_python_metrics/fillendtime.py,sha256=skZttlpoY19g0dGwqGQI8t_1YPPTPEXwg3EfNlfL90I,2533
10
10
  rda_python_metrics/fillglobususage.py,sha256=-cvIipaFju75aw9axHkx6JIe9HWYwQOD8-0blQaxxUM,9442
11
11
  rda_python_metrics/fillglobususage.usg,sha256=p-f5hGGDkcM2O8ADEP0Do_lMIIFj8KkiFG1HJ-YgtQM,637
12
12
  rda_python_metrics/fillipinfo.py,sha256=xIVJ6nDvVvMOjb7s_6-YDLVRBC09pDFugnjB3Nrmqus,5641
13
13
  rda_python_metrics/fillipinfo.usg,sha256=taITqZa6GL0-wxXcMEdMU3ZlQbJ1CsmFclTvXpG5TLg,644
14
- rda_python_metrics/filloneorder.py,sha256=MhllvtS2PM1SMkf1dHmGTIppIkt__SRMKIUPrv_iRRU,5472
14
+ rda_python_metrics/filloneorder.py,sha256=H4XIskpViaiKgc9StvS4cQT_LpM3VYLCsuuqCm4UqmA,5425
15
15
  rda_python_metrics/filloneorder.usg,sha256=mtOySKx6-D4k2bbTcmi6cSYtINiycRyHQkHozi0CQu0,1466
16
- rda_python_metrics/fillosdfusage.py,sha256=5aC5ocIl9TKHJg-2oBMB1yu0AZBVcEYr6IRoxVBJS6s,8758
16
+ rda_python_metrics/fillosdfusage.py,sha256=l9hV58WOZmvmenlm_zqWXZ03u6dkn_dQCI91fQYKrEk,8758
17
17
  rda_python_metrics/fillosdfusage.usg,sha256=Jlyn6K1LS1ZqDbmOQpR3KNVnp43-kfyvqYlcDt4jPOM,632
18
- rda_python_metrics/fillrdadb.py,sha256=MDcl6oM-A1xek9SBP9Gvor_Ouq5dB7mki2Vf_w8VDNk,5199
18
+ rda_python_metrics/fillrdadb.py,sha256=cb6upPApAZloOouUSzZZmjGvOsYT_Nzh9Lz926WE3ZQ,5333
19
19
  rda_python_metrics/fillrdadb.usg,sha256=E_Bf4G8yVABogjRmIOaIbTGgnII2W6RltaFad2XEV2Q,1228
20
20
  rda_python_metrics/filltdsusage.py,sha256=Hg09ogrqaTfULyBy3eLGTudDs_5JrXEpf9_V5uEGiN0,10148
21
21
  rda_python_metrics/filltdsusage.usg,sha256=yqTHRe8WpZWpu3gso_obgt6LO41-JC27rTIDfdZcISo,538
22
22
  rda_python_metrics/filluser.py,sha256=CvaMRaUPaR9nxJAExkLTb3Ci4sD7RQMOpWhWJdbyMF0,8907
23
23
  rda_python_metrics/filluser.usg,sha256=Xgqi0QwA9-4jpYj3L4Q4TISpVwRlsomt2G7T0oYAFak,520
24
- rda_python_metrics/logarch.py,sha256=ay6ku3T5_-2O3QhR6EaFzHS9iUetkVpuEh-VsIOTv-4,12303
25
- rda_python_metrics/logarch.usg,sha256=N8c8ixguGEW7GiqsY_0xnG6A2713PrZ95XU_vJQH_uc,911
24
+ rda_python_metrics/logarch.py,sha256=cMty6vI5t4oBAKnA4VYZjEvfYoj_8cSUuW0orPOJOZQ,13721
25
+ rda_python_metrics/logarch.usg,sha256=6ksFxu_-lzhRpQE2YFAqgGVhwFP4PTweCbFBuGhhICU,960
26
26
  rda_python_metrics/pgperson.py,sha256=q_0jSlTyqEQh2J8yq5Nrf9TLg1awvREdp3E8XyN7PoI,2068
27
+ rda_python_metrics/pgsyspath.py,sha256=DZhFp-r-LzN0qrHtfdCYfnGDnmD_R4ufuEduk0_vRIw,1039
27
28
  rda_python_metrics/pgusername.py,sha256=sNtPNKdubZYNPWR89pAXHGTuP6q8kuf71ig7-XJLXrQ,1245
28
29
  rda_python_metrics/viewallusage.py,sha256=B-4s3aoAkAkeB1QM_xfZceRe_QI16vwpI81ekb8VPgc,15586
29
30
  rda_python_metrics/viewallusage.usg,sha256=ABtOCqGoE6HKE1IPsk02ppC883vNiJILRPBRrpbnzRM,9296
@@ -31,19 +32,19 @@ rda_python_metrics/viewcheckusage.py,sha256=HougqjDAOVG6pYglFjyHQ-UdLBcYe7v_jzU1
31
32
  rda_python_metrics/viewcheckusage.usg,sha256=KuJFycggGiUcSezQ9vywDbituvu63SZ-ZnNTaMpbc-A,8930
32
33
  rda_python_metrics/viewcodusage.py,sha256=ScyZFjMSss1GNZdmXVs9wWRbaPZRahaFXsWG8kIVRP4,14012
33
34
  rda_python_metrics/viewcodusage.usg,sha256=_kgF7Tk2_n1JVf9km2MiwO86vtZRCdu4i8hkWN0eETo,8637
34
- rda_python_metrics/viewordusage.py,sha256=9zIJkThKgSOW58qXyQs2Hq8EeEp645lnpD5bstSzR_0,15370
35
+ rda_python_metrics/viewordusage.py,sha256=0TIUM-V63m4Ol0rW4QIbnw9FrFlSQ2Rv3hGgopSIIz0,15435
35
36
  rda_python_metrics/viewordusage.usg,sha256=TqZDQk-DzOWC6_uzmFzGyA4F98ojOifANJGv9BCfH1I,10599
36
37
  rda_python_metrics/viewrqstusage.py,sha256=wNH5DTEBYrUQKAms10weBH939r-m3tLXXg5PwS6bzlk,16690
37
38
  rda_python_metrics/viewrqstusage.usg,sha256=Ii5-7h_RO2rkoE9VLxuLhc9klgkEJSqHoDrsOlQOTKo,10481
38
39
  rda_python_metrics/viewtdsusage.py,sha256=-dMj2xaxEHxRM8c8ifkHYEWURQBze_3ITe9BAuglD0M,14276
39
40
  rda_python_metrics/viewtdsusage.usg,sha256=-q3tsV3S0-pnfSXFwH62MWm_l1WywPoQGHc5l9LNXI0,8884
40
- rda_python_metrics/viewwebfile.py,sha256=BqtA_YNhprnrGE6GWEW7n5PDxzNlljfv_MOPezOQaeU,13594
41
+ rda_python_metrics/viewwebfile.py,sha256=HSMNkQQawonu6W3blV7g9UbJuNy9VAOn9COqgmjv5dI,13646
41
42
  rda_python_metrics/viewwebfile.usg,sha256=lTNi8Yu8BUJuExEDJX-vsJyWUSUIQTS-DiiBEVFo33s,10054
42
43
  rda_python_metrics/viewwebusage.py,sha256=jhoHuRPVNtp7Lbjv0l-Jy_vp2p2nWQC7IVZ0P4JUJ4I,16657
43
44
  rda_python_metrics/viewwebusage.usg,sha256=IsT72v22xyZf7ng_IodVs0dLTsH1Q4BtFvT-gs0-xJY,9946
44
- rda_python_metrics-1.0.6.dist-info/licenses/LICENSE,sha256=1dck4EAQwv8QweDWCXDx-4Or0S8YwiCstaso_H57Pno,1097
45
- rda_python_metrics-1.0.6.dist-info/METADATA,sha256=AqL2-wo2fraDbSAMeHoU0tiagTuL09SDurphGFF7nuo,667
46
- rda_python_metrics-1.0.6.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
47
- rda_python_metrics-1.0.6.dist-info/entry_points.txt,sha256=uoCDpwsZJNKXSxkngolWGgmi7arXp7BM-p6tVHVWvgc,1133
48
- rda_python_metrics-1.0.6.dist-info/top_level.txt,sha256=aoBgbR_o70TP0QmMW0U6inRHYtfKld47OBmnWnLnDOs,19
49
- rda_python_metrics-1.0.6.dist-info/RECORD,,
45
+ rda_python_metrics-1.0.8.dist-info/licenses/LICENSE,sha256=1dck4EAQwv8QweDWCXDx-4Or0S8YwiCstaso_H57Pno,1097
46
+ rda_python_metrics-1.0.8.dist-info/METADATA,sha256=pLLx0jNG31tY-7pjEZvH5EP1nfMd2_kJuKjU52VpeKs,735
47
+ rda_python_metrics-1.0.8.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
48
+ rda_python_metrics-1.0.8.dist-info/entry_points.txt,sha256=uoCDpwsZJNKXSxkngolWGgmi7arXp7BM-p6tVHVWvgc,1133
49
+ rda_python_metrics-1.0.8.dist-info/top_level.txt,sha256=aoBgbR_o70TP0QmMW0U6inRHYtfKld47OBmnWnLnDOs,19
50
+ rda_python_metrics-1.0.8.dist-info/RECORD,,