rda-python-common 1.0.41__tar.gz → 1.0.43__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of rda-python-common might be problematic.
- {rda_python_common-1.0.41 → rda_python_common-1.0.43}/PKG-INFO +1 -1
- {rda_python_common-1.0.41 → rda_python_common-1.0.43}/pyproject.toml +1 -1
- {rda_python_common-1.0.41 → rda_python_common-1.0.43}/src/rda_python_common/PgDBI.py +52 -47
- {rda_python_common-1.0.41 → rda_python_common-1.0.43}/src/rda_python_common/PgFile.py +14 -2
- {rda_python_common-1.0.41 → rda_python_common-1.0.43}/src/rda_python_common/PgLOG.py +3 -2
- {rda_python_common-1.0.41 → rda_python_common-1.0.43}/src/rda_python_common/PgOPT.py +1 -1
- {rda_python_common-1.0.41 → rda_python_common-1.0.43}/src/rda_python_common.egg-info/PKG-INFO +1 -1
- {rda_python_common-1.0.41 → rda_python_common-1.0.43}/LICENSE +0 -0
- {rda_python_common-1.0.41 → rda_python_common-1.0.43}/README.md +0 -0
- {rda_python_common-1.0.41 → rda_python_common-1.0.43}/setup.cfg +0 -0
- {rda_python_common-1.0.41 → rda_python_common-1.0.43}/src/rda_python_common/PgCMD.py +0 -0
- {rda_python_common-1.0.41 → rda_python_common-1.0.43}/src/rda_python_common/PgLock.py +0 -0
- {rda_python_common-1.0.41 → rda_python_common-1.0.43}/src/rda_python_common/PgSIG.py +0 -0
- {rda_python_common-1.0.41 → rda_python_common-1.0.43}/src/rda_python_common/PgSplit.py +0 -0
- {rda_python_common-1.0.41 → rda_python_common-1.0.43}/src/rda_python_common/PgUtil.py +0 -0
- {rda_python_common-1.0.41 → rda_python_common-1.0.43}/src/rda_python_common/__init__.py +0 -0
- {rda_python_common-1.0.41 → rda_python_common-1.0.43}/src/rda_python_common.egg-info/SOURCES.txt +0 -0
- {rda_python_common-1.0.41 → rda_python_common-1.0.43}/src/rda_python_common.egg-info/dependency_links.txt +0 -0
- {rda_python_common-1.0.41 → rda_python_common-1.0.43}/src/rda_python_common.egg-info/requires.txt +0 -0
- {rda_python_common-1.0.41 → rda_python_common-1.0.43}/src/rda_python_common.egg-info/top_level.txt +0 -0
- {rda_python_common-1.0.41 → rda_python_common-1.0.43}/test/test_common.py +0 -0
{rda_python_common-1.0.41 → rda_python_common-1.0.43}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: rda_python_common
-Version: 1.0.41
+Version: 1.0.43
 Summary: RDA Python common library codes shared by other RDA python packages
 Author-email: Zaihua Ji <zji@ucar.edu>
 Project-URL: Homepage, https://github.com/NCAR/rda-python-common
{rda_python_common-1.0.41 → rda_python_common-1.0.43}/src/rda_python_common/PgDBI.py

@@ -191,7 +191,7 @@ def default_scinfo(dbname = None, scname = None, dbhost = None, lnname = None, p
 # get the datbase sock file name of a given dbname for local connection
 #
 def get_dbsock(dbname):
-
+
     return (DBSOCKS[dbname] if dbname in DBSOCKS else DBSOCKS['default'])
 
 #
@@ -392,7 +392,7 @@ def try_add_table(dberror, logact):
 #
 def add_a_table(tname, logact):
 
-    add_new_table(tname, logact = logact)
+    add_new_table(tname, logact = logact)
 
 #
 # add a new table for given table name
@@ -462,7 +462,7 @@ def check_dberror(pgerr, pgcnt, sqlstr, ary, logact = PGDBI['ERRLOG']):
         try_add_table(dberror, logact)
         return PgLOG.SUCCESS
 
-    if logact&PgLOG.DOLOCK and pgcode and re.match(r'^55\w\w\w$', pgcode):
+    if logact&PgLOG.DOLOCK and pgcode and re.match(r'^55\w\w\w$', pgcode):
         logact &= ~PgLOG.EXITLG   # no exit for lock error
     return qelog(dberror, 0, sqlstr, ary, pgcnt, logact)
 
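check_dberror() retries most failures, but when the caller passes PgLOG.DOLOCK it strips the exit flag for SQLSTATE class 55 errors (for example 55P03, lock_not_available), so a lock conflict is logged without terminating the process. A minimal sketch of that kind of check with psycopg2; the helper name and the LOCK statement are illustrative, not part of this package:

import re
import psycopg2

def is_lock_error(err):
    # psycopg2 exposes the five-character SQLSTATE as err.pgcode; class 55
    # ("object not in prerequisite state") includes 55P03 lock_not_available.
    return bool(getattr(err, 'pgcode', None) and re.match(r'^55\w\w\w$', err.pgcode))

# try:
#     cur.execute("LOCK TABLE wfile IN ACCESS EXCLUSIVE MODE NOWAIT")
# except psycopg2.Error as pgerr:
#     if is_lock_error(pgerr):
#         pass   # log and retry later instead of exiting
#     else:
#         raise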
@@ -476,7 +476,7 @@ def pgbatch(sqlfile, foreground = 0):
 #   slave = "/{}/{}.slave".format(PGDBI['VWHOME'], PGDBI['VWHOST'])
 #   if not op.exists(slave): default_scname()
 
-    dbhost = 'localhost' if PGDBI['DBSHOST'] == PgLOG.PGLOG['HOSTNAME'] else PGDBI['DBHOST']
+    dbhost = 'localhost' if PGDBI['DBSHOST'] == PgLOG.PGLOG['HOSTNAME'] else PGDBI['DBHOST']
     options = "-h {} -p {}".format(dbhost, PGDBI['DBPORT'])
     pwname = get_pgpass_password()
     os.environ['PGPASSWORD'] = pwname
@@ -518,7 +518,7 @@ def pgconnect(reconnect = 0, pgcnt = 0, autocommit = True):
     else:
         config['host'] = PGDBI['DBHOST'] if PGDBI['DBHOST'] else PGDBI['DEFHOST']
         if not PGDBI['DBPORT']: PGDBI['DBPORT'] = get_dbport(PGDBI['DBNAME'])
-        if PGDBI['DBPORT']: config['port'] = PGDBI['DBPORT']
+        if PGDBI['DBPORT']: config['port'] = PGDBI['DBPORT']
     config['password'] = '***'
     sqlstr = "psycopg2.connect(**{})".format(config)
     config['password'] = get_pgpass_password()
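pgconnect() logs the connection call with the password masked as '***' and only swaps the real password in immediately before connecting. A minimal sketch of that pattern; the function name and the config keys are illustrative:

import psycopg2

def connect_with_masked_log(config, password):
    # Log the connect call with the password replaced so credentials never reach the log.
    logged = dict(config, password='***')
    print("psycopg2.connect(**{})".format(logged))
    # Use the real password only for the actual connection attempt.
    return psycopg2.connect(**dict(config, password=password))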
@@ -529,7 +529,7 @@ def pgconnect(reconnect = 0, pgcnt = 0, autocommit = True):
            if autocommit: pgdb.autocommit = autocommit
            return pgdb
        except PgSQL.Error as pgerr:
-           if not check_dberror(pgerr, pgcnt, sqlstr, None, PGDBI['EXITLG']): return PgLOG.FAILURE
+           if not check_dberror(pgerr, pgcnt, sqlstr, None, PGDBI['EXITLG']): return PgLOG.FAILURE
        pgcnt += 1
 
 #
@@ -555,7 +555,7 @@ def pgcursor():
            if pgcnt == 0 and pgdb.closed:
                pgconnect(1)
            elif not check_dberror(pgerr, pgcnt, '', None, PGDBI['EXITLG']):
-               return PgLOG.FAILURE
+               return PgLOG.FAILURE
            else:
                break
        pgcnt += 1
@@ -625,7 +625,7 @@ def pgsequence(tablename, logact = PGDBI['ERRLOG']):
        pgrec = pgget('information_schema.columns', 'column_name', condition, logact)
        seqname = pgrec['column_name'] if pgrec else None
        SEQUENCES[tablename] = seqname
-
+
    return seqname
 
 #
@@ -645,7 +645,7 @@ def check_default_value(dflt, isint):
 
 #
 # local fucntion: insert prepare pgadd()/pgmadd() for given table and field names
-# according to options of multiple place holds and returning sequence id
+# according to options of multiple place holds and returning sequence id
 #
 def prepare_insert(tablename, fields, multi = True, getid = None):
 
@@ -708,7 +708,7 @@ def pgadd(tablename, record, logact = PGDBI['ERRLOG'], getid = None):
    global curtran
    if not record: return PgLOG.pglog("Nothing adds to " + tablename, logact)
    if logact&PgLOG.DODFLT: prepare_default(tablename, record, logact)
-   if logact&PgLOG.AUTOID and not getid: getid = pgsequence(tablename, logact)
+   if logact&PgLOG.AUTOID and not getid: getid = pgsequence(tablename, logact)
    sqlstr = prepare_insert(tablename, list(record), True, getid)
    values = tuple(record.values())
 
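pgadd() assembles a parameterized INSERT through prepare_insert() and hands the record values to the driver separately, optionally asking for the generated sequence id back. A rough sketch of that idea; prepare_insert's exact output is not shown in this diff, and the table and field names below are only examples:

def build_insert(tablename, fields, getid=None):
    # One %s placeholder per field; values are bound by the driver, never interpolated.
    placeholders = ", ".join(["%s"] * len(fields))
    sql = "INSERT INTO {} ({}) VALUES ({})".format(tablename, ", ".join(fields), placeholders)
    if getid:
        # Ask PostgreSQL to return the generated sequence value.
        sql += " RETURNING {}".format(getid)
    return sql

record = {'fname': 'data.nc', 'data_size': 1024}
sql = build_insert('wfile', list(record), getid='wid')
# cursor.execute(sql, tuple(record.values())); new_id = cursor.fetchone()[0]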
@@ -804,7 +804,7 @@ def pgmadd(tablename, records, logact = PGDBI['ERRLOG'], getid = None):
 #
 def prepare_select(tablenames, fields = None, condition = None, cndflds = None, logact = 0):
 
-   sqlstr = ''
+   sqlstr = ''
    if tablenames:
        if fields:
            sqlstr = "SELECT " + fields
@@ -838,7 +838,7 @@ def prepare_select(tablenames, fields = None, condition = None, cndflds = None,
 # tablenames: comma deliminated string of one or more tables and more than one table for joining,
 # fields: comma deliminated string of one or more field names,
 # condition: querry conditions for where clause
-# return a dict reference with keys as field names upon success
+# return a dict reference with keys as field names upon success
 #
 def pgget(tablenames, fields, condition = None, logact = 0):
 
@@ -865,7 +865,7 @@ def pgget(tablenames, fields, condition = None, logact = 0):
                record[colname] = val
            pgcur.close()
        except PgSQL.Error as pgerr:
-           if not check_dberror(pgerr, pgcnt, sqlstr, None, logact): return PgLOG.FAILURE
+           if not check_dberror(pgerr, pgcnt, sqlstr, None, logact): return PgLOG.FAILURE
        else:
            break
        pgcnt += 1
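pgget() turns the single fetched row into a dict keyed by column name (the record[colname] = val loop above). A minimal sketch of that mapping, assuming a psycopg2-style cursor:

def row_to_dict(cursor):
    # cursor.description has one entry per selected column; index 0 is its name.
    row = cursor.fetchone()
    if row is None:
        return None
    return {col[0]: val for col, val in zip(cursor.description, row)}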
@@ -913,7 +913,7 @@ def pgmget(tablenames, fields, condition = None, logact = PGDBI['ERRLOG']):
                records[colname] = vals
            pgcur.close()
        except PgSQL.Error as pgerr:
-           if not check_dberror(pgerr, pgcnt, sqlstr, None, logact): return PgLOG.FAILURE
+           if not check_dberror(pgerr, pgcnt, sqlstr, None, logact): return PgLOG.FAILURE
        else:
            break
        pgcnt += 1
@@ -927,7 +927,7 @@ def pgmget(tablenames, fields, condition = None, logact = PGDBI['ERRLOG']):
 # tablenames: comma deliminated string of one or more tables
 # fields: comma deliminated string of one or more field names,
 # cnddict: condition dict with field names : values
-# return a dict(field names : values) upon success
+# return a dict(field names : values) upon success
 #
 # retrieve one records from tablenames condition dict
 #
@@ -961,7 +961,7 @@ def pghget(tablenames, fields, cnddict, logact = PGDBI['ERRLOG']):
                record[colname] = val
            pgcur.close()
        except PgSQL.Error as pgerr:
-           if not check_dberror(pgerr, pgcnt, sqlstr, values, logact): return PgLOG.FAILURE
+           if not check_dberror(pgerr, pgcnt, sqlstr, values, logact): return PgLOG.FAILURE
        else:
            break
        pgcnt += 1
@@ -980,7 +980,7 @@ def pghget(tablenames, fields, cnddict, logact = PGDBI['ERRLOG']):
 # tablenames: comma deliminated string of one or more tables
 # fields: comma deliminated string of one or more field names,
 # cnddicts: condition dict with field names : value lists
-# return a dict(field names : value lists) upon success
+# return a dict(field names : value lists) upon success
 #
 # retrieve multiple records from tablenames for condition dict
 #
@@ -1060,7 +1060,7 @@ def prepare_update(tablename, fields, condition = None, cndflds = None):
        for fld in cndflds:
            cndset.append("{}=%s".format(pgname(fld, '.')))
        condition = " AND ".join(cndset)
-
+
    sqlstr = "UPDATE {} SET {} WHERE {}".format(tablename, strflds, condition)
    if PgLOG.PGLOG['DBGLEVEL']: PgLOG.pgdbg(1000, sqlstr)
 
@@ -1093,7 +1093,7 @@ def pgupdt(tablename, record, condition, logact = PGDBI['ERRLOG']):
            ucnt = pgcur.rowcount
            pgcur.close()
        except PgSQL.Error as pgerr:
-           if not check_dberror(pgerr, pgcnt, sqlstr, values, logact): return PgLOG.FAILURE
+           if not check_dberror(pgerr, pgcnt, sqlstr, values, logact): return PgLOG.FAILURE
        else:
            break
        pgcnt += 1
@@ -1184,7 +1184,7 @@ def pgmupdt(tablename, records, cnddicts, logact = PGDBI['ERRLOG']):
            execute_batch(pgcur, sqlstr, values, page_size=PGDBI['PGSIZE'])
            ucnt = cntrow
        except PgSQL.Error as pgerr:
-           if not check_dberror(pgerr, pgcnt, sqlstr, values[0], logact): return PgLOG.FAILURE
+           if not check_dberror(pgerr, pgcnt, sqlstr, values[0], logact): return PgLOG.FAILURE
        else:
            break
        pgcnt += 1
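pgmupdt() (and pgmdel() further down) pushes many parameter tuples through psycopg2.extras.execute_batch, with the batch size taken from PGDBI['PGSIZE']. A small self-contained sketch of that call; the connection settings, table, and values are made up for illustration:

import psycopg2
from psycopg2.extras import execute_batch

conn = psycopg2.connect(dbname="rdadb")   # illustrative connection
cur = conn.cursor()
sql = "UPDATE wfile SET data_size = %s WHERE wid = %s"
values = [(1024, 1), (2048, 2), (4096, 3)]
# Groups the parameter sets into pages of page_size statements per server round trip.
execute_batch(cur, sql, values, page_size=500)
conn.commit()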
@@ -1211,7 +1211,7 @@ def prepare_delete(tablename, condition = None, cndflds = None):
        for fld in cndflds:
            cndset.append("{}=%s".format(fld))
        condition = " AND ".join(cndset)
-
+
    sqlstr = "DELETE FROM {} WHERE {}".format(tablename, condition)
    if PgLOG.PGLOG['DBGLEVEL']: PgLOG.pgdbg(1000, sqlstr)
 
@@ -1238,7 +1238,7 @@ def pgdel(tablename, condition, logact = PGDBI['ERRLOG']):
            dcnt = pgcur.rowcount
            pgcur.close()
        except PgSQL.Error as pgerr:
-           if not check_dberror(pgerr, pgcnt, sqlstr, None, logact): return PgLOG.FAILURE
+           if not check_dberror(pgerr, pgcnt, sqlstr, None, logact): return PgLOG.FAILURE
        else:
            break
        pgcnt += 1
@@ -1276,7 +1276,7 @@ def pghdel(tablename, cnddict, logact = PGDBI['ERRLOG']):
            dcnt = pgcur.rowcount
            pgcur.close()
        except PgSQL.Error as pgerr:
-           if not check_dberror(pgerr, pgcnt, sqlstr, values, logact): return PgLOG.FAILURE
+           if not check_dberror(pgerr, pgcnt, sqlstr, values, logact): return PgLOG.FAILURE
        else:
            break
        pgcnt += 1
@@ -1316,7 +1316,7 @@ def pgmdel(tablename, cnddicts, logact = PGDBI['ERRLOG']):
            execute_batch(pgcur, sqlstr, values, page_size=PGDBI['PGSIZE'])
            dcnt = len(values)
        except PgSQL.Error as pgerr:
-           if not check_dberror(pgerr, pgcnt, sqlstr, values[0], logact): return PgLOG.FAILURE
+           if not check_dberror(pgerr, pgcnt, sqlstr, values[0], logact): return PgLOG.FAILURE
        else:
            break
        pgcnt += 1
@@ -1350,7 +1350,7 @@ def pgexec(sqlstr, logact = PGDBI['ERRLOG']):
            ret = pgcur.rowcount
            pgcur.close()
        except PgSQL.Error as pgerr:
-           if not check_dberror(pgerr, pgcnt, sqlstr, None, logact): return PgLOG.FAILURE
+           if not check_dberror(pgerr, pgcnt, sqlstr, None, logact): return PgLOG.FAILURE
        else:
            break
        pgcnt += 1
@@ -1405,14 +1405,14 @@ def pgcheck(tablename, logact = 0):
    return (PgLOG.SUCCESS if ret else PgLOG.FAILURE)
 
 #
-# group of functions to check parent records and add an empty one if missed
+# group of functions to check parent records and add an empty one if missed
 # return user.uid upon success, 0 otherwise
 #
 def check_user_uid(userno, date = None):
 
    if not userno: return 0
    if type(userno) is str: userno = int(userno)
-
+
    if date is None:
        datecond = "until_date IS NULL"
        date = 'today'
@@ -1441,7 +1441,7 @@ def check_user_uid(userno, date = None):
 # return user.uid upon success, 0 otherwise
 #
 def get_user_uid(logname, date = None):
-
+
    if not logname: return 0
    if not date:
        date = 'today'
@@ -1451,7 +1451,7 @@ def get_user_uid(logname, date = None):
 
    pgrec = pgget("dssdb.user", "uid", "logname = '{}' AND {}".format(logname, datecond), PGDBI['ERRLOG'])
    if pgrec: return pgrec['uid']
-
+
    if logname not in LMISSES:
        PgLOG.pglog("{}: UCAR Login Name NOT on file for {}".format(logname, date), PgLOG.LGWNEM)
        LMISSES.append(logname)
@@ -1505,7 +1505,7 @@ def ucar_user_info(userno, logname = None):
        pgrec['userno'] = userno
    elif pgrec['userno']:
        pgrec['userno'] = userno = int(pgrec['userno'])
-   if pgrec['upid']: pgrec['upid'] = int(pgrec['upid'])
+   if pgrec['upid']: pgrec['upid'] = int(pgrec['upid'])
    if pgrec['stat_flag']: pgrec['stat_flag'] = 'A' if pgrec['stat_flag'] == "True" else 'C'
    if pgrec['email'] and re.search(r'(@|\.)ucar\.edu$', pgrec['email'], re.I):
        pgrec['email'] = pgrec['ucaremail']
@@ -1587,7 +1587,7 @@ def check_wuser_wuid(email, date = None):
    if pgrec: return pgrec['wuid']
 
    # now add one in
-   record = {'email' : email}
+   record = {'email' : email}
    # check again if a ruser is on file
    pgrec = pgget("ruser", "*", emcond + " AND end_date IS NULL", PGDBI['ERRLOG'])
    if not pgrec: pgrec = pgget("ruser", "*", emcond, PGDBI['ERRLOG'])
@@ -1628,7 +1628,7 @@ def check_cdp_wuser(username):
 
    pgrec = pgget("wuser", "wuid", "cdpname = '{}'".format(username), PGDBI['EXITLG'])
    if pgrec: return pgrec['wuid']
-
+
    idrec = pgget("wuser", "wuid", "email = '{}'".format(pgrec['email']), PGDBI['EXITLG'])
    wuid = idrec['wuid'] if idrec else 0
    if wuid > 0:
@@ -1650,7 +1650,7 @@ def check_cdp_wuser(username):
 # for given email to get long country name
 #
 def email_to_country(email):
-
+
    ms = re.search(r'\.(\w\w)$', email)
    if ms:
        pgrec = pgget("countries", "token", "domain_id = '{}'".format(ms.group(1)), PGDBI['EXITLG'])
@@ -1694,7 +1694,7 @@ def use_rdadb(dsid, logact = 0, vals = None):
 # return a condition string for a given field
 #
 def get_field_condition(fld, vals, isstr = 0, noand = 0):
-
+
    cnd = wcnd = negative = ''
    sign = "="
    logic = " OR "
@@ -1703,7 +1703,7 @@ def get_field_condition(fld, vals, isstr = 0, noand = 0):
    ncnt = scnt = wcnt = cnt = 0
    for i in range(count):
        val = vals[i]
-       if val is None or (i > 0 and val == vals[i-1]): continue
+       if val is None or (i > 0 and val == vals[i-1]): continue
        if i == 0 and val == PGSIGNS[0]:
            negative = "NOT "
            logic = " AND "
@@ -1759,7 +1759,7 @@ def get_field_condition(fld, vals, isstr = 0, noand = 0):
    if cnt > 0:
        if cnt > 1:
            cnd = "{} {}IN ({})".format(fld, negative, cnd)
-       else:
+       else:
            cnd = "{} {} {}".format(fld, ("<>" if negative else "="), cnd)
    if ncnt > 0:
        ncnd = "{} IS {}NULL".format(fld, negative)
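get_field_condition() collapses a list of values into one SQL predicate: an IN (...) list when several values remain, a plain = or <> for a single value, and IS [NOT] NULL when nulls are involved. A simplified sketch of that logic; the real function also handles string quoting and the PGSIGNS prefixes, which are omitted here:

def field_condition(fld, vals, negate=False):
    # Collapse a value list into a single predicate for a WHERE clause.
    vals = [v for v in vals if v is not None]
    if not vals:
        return "{} IS {}NULL".format(fld, "NOT " if negate else "")
    if len(vals) > 1:
        return "{} {}IN ({})".format(fld, "NOT " if negate else "", ", ".join(str(v) for v in vals))
    return "{} {} {}".format(fld, "<>" if negate else "=", vals[0])

print(field_condition("gindex", [1, 2, 3]))    # gindex IN (1, 2, 3)
print(field_condition("gindex", [7], True))    # gindex <> 7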
@@ -1782,7 +1782,7 @@ def fieldname_string(fnames, dnames = None, anames = None, wflds = None):
        fnames = anames   # include all field names
 
    if not wflds: return fnames
-
+
    for wfld in wflds:
        if not wfld or fnames.find(wfld) > -1: continue   # empty field, or included already
        if wfld == "Q":
@@ -1806,7 +1806,7 @@ def fieldname_string(fnames, dnames = None, anames = None, wflds = None):
 # dsid: dataset id
 # field: path field name: webpath or savedpath)
 # go through group tree upward to find a none-empty path, return it or null
-#
+#
 def get_group_field_path(gindex, dsid, field):
 
    if gindex:
@@ -1863,7 +1863,7 @@ def build_customized_email(table, field, condition, subject, logact = 0):
    ebuf = "From: {}\nTo: {}\n".format(sender, receiver)
    if cc: ebuf += "Cc: {}\n".format(cc)
    ebuf += "Subject: {}!\n\n{}\n".format(subject, msg)
-
+
    if PgLOG.PGLOG['EMLSEND']:
        estat = PgLOG.send_customized_email(f"{table}.{condition}", ebuf, logact)
        if estat != PgLOG.SUCCESS:
@@ -2065,7 +2065,7 @@ def match_down_path(path, dpaths):
 # validate is login user is in DECS group
 # check all node if skpdsg is false, otherwise check non-DSG nodes
 def validate_decs_group(cmdname, logname, skpdsg):
-
+
    if skpdsg and PgLOG.PGLOG['DSGHOSTS'] and re.search(r'(^|:){}'.format(PgLOG.PGLOG['HOSTNAME']), PgLOG.PGLOG['DSGHOSTS']): return
    if not logname: lgname = PgLOG.PGLOG['CURUID']
 
@@ -2074,12 +2074,12 @@ def validate_decs_group(cmdname, logname, skpdsg):
 
 #
 # add an allusage record into yearly table; create a new yearly table if it does not exist
-# year -- year to identify the yearly table, evaluated if missing
+# year -- year to identify the yearly table, evaluated if missing
 # records -- hash to hold one or multiple records.
 #    Dict keys: email -- user email address,
 #               org_type -- organization type
 #               country -- country code
-#               dsid -- dataset ID
+#               dsid -- dataset ID
 #               date -- date data accessed
 #               time -- time data accessed
 #               quarter -- quarter of the year data accessed
@@ -2090,11 +2090,11 @@ def validate_decs_group(cmdname, logname, skpdsg):
 #               ip -- user IP address
 #               region -- user region name; for example, Colorado
 #
-# isarray -- if true, mutiple records provided via arrays for each hash key
+# isarray -- if true, mutiple records provided via arrays for each hash key
 # docheck -- if 1, check and add only if record is not on file
 # docheck -- if 2, check and add if record is not on file, and update if exists
 # docheck -- if 4, check and add if record is not on file, and update if exists,
-#            and also checking NULL email value too
+#            and also checking NULL email value too
 #
 def add_yearly_allusage(year, records, isarray = 0, docheck = 0):
 
@@ -2150,7 +2150,7 @@ def add_yearly_allusage(year, records, isarray = 0, docheck = 0):
 
 #
 # add a wusage record into yearly table; create a new yearly table if it does not exist
-# year -- year to identify the yearly table, evaluated if missing
+# year -- year to identify the yearly table, evaluated if missing
 # records -- hash to hold one or multiple records.
 #    Dict keys: wid - reference to wfile.wid
 #               wuid_read - reference to wuser.wuid, 0 if missing email
@@ -2163,7 +2163,7 @@ def add_yearly_allusage(year, records, isarray = 0, docheck = 0):
 #               locflag - location flag: Glade or Object
 #               ip - IP address
 #
-# isarray -- if true, mutiple records provided via arrays for each hash key
+# isarray -- if true, mutiple records provided via arrays for each hash key
 #
 def add_yearly_wusage(year, records, isarray = 0):
 
@@ -2248,4 +2248,9 @@ def read_pgpass():
            dbhost, dbport, dbname, lnname, pwname = line.split(":")
            DBPASS[(dbhost, dbport, dbname, lnname)] = pwname
    except FileNotFoundError:
-
+       with open(PgLOG.PGLOG['GDEXHOME'] + '/.pgpass', "r") as f:
+           for line in f:
+               line = line.strip()
+               if not line or line.startswith("#"): continue
+               dbhost, dbport, dbname, lnname, pwname = line.split(":")
+               DBPASS[(dbhost, dbport, dbname, lnname)] = pwname
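read_pgpass() now falls back to a second password file: if the primary .pgpass is missing, the same colon-delimited host:port:dbname:login:password lines are read from under PGLOG['GDEXHOME'] (newly set to /data/local in PgLOG.py below). A standalone sketch of that two-location lookup; the paths and the function shape are illustrative:

import os

def read_pgpass(primary, fallback):
    # Parse host:port:dbname:login:password lines from the first file that exists.
    passwords = {}
    for pgpass in (primary, fallback):
        if not os.path.exists(pgpass):
            continue
        with open(pgpass) as f:
            for line in f:
                line = line.strip()
                if not line or line.startswith("#"):
                    continue
                dbhost, dbport, dbname, lnname, pwname = line.split(":")
                passwords[(dbhost, dbport, dbname, lnname)] = pwname
        break
    return passwords

# passwords = read_pgpass(os.path.expanduser("~/.pgpass"), "/data/local/.pgpass")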
{rda_python_common-1.0.41 → rda_python_common-1.0.43}/src/rda_python_common/PgFile.py

@@ -1636,7 +1636,7 @@ def local_file_stat(file, fstat, opt, logact):
 
    info = {}
    info['isfile'] = (1 if stat.S_ISREG(fstat.st_mode) else 0)
-   info['data_size'] = fstat.st_size
+   info['data_size'] = fstat.st_size if info['isfile'] else local_path_size(file)
    info['fname'] = op.basename(file)
    if not opt: return info
    if opt&64 and info['isfile'] and info['data_size'] < PgLOG.PGLOG['MINSIZE']:
@@ -1665,6 +1665,18 @@ def local_file_stat(file, fstat, opt, logact):
 
    return info
 
+#
+# get total size of files under a given path
+#
+def local_path_size(pname):
+
+   if not pname: pname = '.'   # To get size of current directory
+   size = 0
+   for path, dirs, files in os.walk(pname):
+       for f in files:
+           size += os.path.getsize(os.path.join(path, f))
+   return size
+
 #
 # check and get file status information of a file on remote host
 #
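The new local_path_size() walks a directory tree and sums the regular-file sizes, so local_file_stat() can report a meaningful data_size for directories instead of the directory inode size. An equivalent compact form, with an illustrative path:

import os

def path_size(pname='.'):
    # Sum the sizes of all files below pname, mirroring what local_path_size() does.
    return sum(os.path.getsize(os.path.join(root, f))
               for root, _dirs, files in os.walk(pname)
               for f in files)

print(path_size('/data/local'))   # total bytes under the tree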
@@ -2557,7 +2569,7 @@ def rda_file_size(file, host, opt = 0, logact = 0):
 #
 def local_file_size(file, opt = 0, logact = 0):
 
-   if not op.
+   if not op.exists(file):
        if opt&4: lmsg(file, PgLOG.PGLOG['MISSFILE'], logact)
        return -1   # file not eixsts
 
{rda_python_common-1.0.41 → rda_python_common-1.0.43}/src/rda_python_common/PgLOG.py

@@ -307,7 +307,7 @@ def send_python_email(subject = None, receiver = None, msg = None, sender = None
    else:
        return ''
 
-   docc = False if cc else True
+   docc = False if cc else True
    if not sender:
        sender = PGLOG['CURUID']
        if sender != PGLOG['RDAUSER']: docc = False
@@ -1271,6 +1271,7 @@ def set_common_pglog():
    PGLOG['ALLROOTS'] = '|'.join([PGLOG['OLDAROOT'], PGLOG['OLDBROOT'], PGLOG['ARCHROOT'], PGLOG['BACKROOT']])
    SETPGLOG("USRHOME", "/glade/u/home")
    SETPGLOG("DSSHOME", "/glade/u/home/rdadata")
+   SETPGLOG("GDEXHOME", "/data/local")
    SETPGLOG("ADDPATH", "")
    SETPGLOG("ADDLIB", "")
    SETPGLOG("OTHPATH", "")
@@ -1372,7 +1373,7 @@ def set_common_pglog():
 
    # empty diretory for HOST-sync
 
-   PGLOG['TMPSYNC'] = PGLOG['DSSDBHM'] + "/tmp/.syncdir"
+   PGLOG['TMPSYNC'] = PGLOG['DSSDBHM'] + "/tmp/.syncdir"
 
    os.umask(2)
 
{rda_python_common-1.0.41 → rda_python_common-1.0.43}/src/rda_python_common/PgOPT.py

@@ -1669,7 +1669,7 @@ def send_request_email_notice(pgrqst, errmsg, fcount, rstat, readyfile = None, p
    if pgrec:
        einfo['SPECIALIST'] = "{} {}".format(pgrec['fstname'], pgrec['lstname'])
        einfo['PHONENO'] = pgrec['phoneno']
-   einfo['SUBJECT'] += " {
+   einfo['SUBJECT'] += f" {pgrqst['dsid']} {einfo['RTYPE']} request {pgrqst['rindex']}"
 
    if pgrqst['note']:
        einfo['RNOTE'] = "\nRequest Detail:\n{}\n".format(pgrqst['note'])
{rda_python_common-1.0.41 → rda_python_common-1.0.43}/src/rda_python_common.egg-info/PKG-INFO
RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: rda_python_common
-Version: 1.0.41
+Version: 1.0.43
 Summary: RDA Python common library codes shared by other RDA python packages
 Author-email: Zaihua Ji <zji@ucar.edu>
 Project-URL: Homepage, https://github.com/NCAR/rda-python-common