rda-python-common 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of rda-python-common might be problematic.

@@ -0,0 +1,1719 @@
1
+ #
2
+ ###############################################################################
3
+ #
4
+ # Title : PgOPT.py
5
+ #
6
+ # Author : Zaihua Ji, zji@ucar.edu
7
+ # Date : 08/26/2020
8
+ #  Purpose : Python library module for holding global variables and
9
+ # functions for processing options and other global functions
10
+ #
11
+ # Work File : $DSSHOME/lib/python/PgOPT.py
12
+ # Github : https://github.com/NCAR/rda-shared-libraries.git
13
+ #
14
+ ###############################################################################
15
+ #
16
+ import os
17
+ import sys
18
+ import re
19
+ import time
20
+ from os import path as op
21
+ import PgLOG
22
+ import PgUtil
23
+ import PgFile
24
+ import PgDBI
25
+
26
+ OUTPUT = None
27
+ CMDOPTS = {}
28
+ INOPTS = {}
29
+
30
+ # global variables are used by all applications and this package.
31
+ # they need to be initialized in application-specific packages
32
+ ALIAS = {}
33
+ TBLHASH = {}
34
+
35
+ ###############################################################################
36
+ # Valid options. The first hash value: 0 means mode option, 1 means single-value
37
+ # option, 2 means multiple-value option, and >=4 means action option. The second
38
+ # hash value is the long option name; either the hash key (treated as the short
39
+ # option name) or the associated long name can be used. All options, except the
40
+ # multi-line value ones, can be specified on the command line; single-value and
41
+ # multi-value options, except option -IM for input files, can also be given in
42
+ # input files. Long option names are used in output files. Option names are
43
+ # case-insensitive.
44
+ #
45
+ # The third hash value defines bit flags:
46
+ # For Action Options:
47
+ # -1 - VSN card actions
48
+ # >0 - set actions
49
+ #
50
+ # For Mode Options:
51
+ # 1 - mode for archiving actions
52
+ # 2 - mode for set actions
53
+ #
54
+ # For Single-Value Info Options:
55
+ # 1(0x001) - auto set value
56
+ # 2(0x002) - manually set value
57
+ # 16(0x010) - convert to integer from commandline and input files, set to 0 if empty
58
+ # 32(0x020) - time field
59
+ # 128(0x080) - '' allowed for single letter value
60
+ # 256(0x100) - date field
61
+ #
62
+ # For Multi-Value Info Options:
63
+ # 1(0x001) - one for multiple
64
+ # 2(0x002) - auto-set,
65
+ # 4(0x004) - expanded from one
66
+ # 8(0x008) - validated
67
+ # 16(0x010) - convert to integer from commandline and input files, set to 0 if empty
68
+ # 32(0x020) - time field
69
+ # 64(0x040) - text field allowing multiple lines
70
+ # 128(0x080) - '' allowed for single letter value
71
+ # 256(0x100) - date field
72
+ #
73
+ # The fourth hash value defines restrictions for single-letter values
74
+ ###############################################################################
75
+ OPTS = {}
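For orientation, the entry layout described in the comment block above can be sketched as follows; every option name and value here is hypothetical, chosen only to illustrate the [kind, long name, bit flags, restriction] structure, and is not part of the packaged module.

# kind: 0 = mode, 1 = single-value, 2 = multi-value, >=4 = action
# [kind, long option name, bit flags, optional single-letter restriction]
EXAMPLE_OPTS = {
    'XV' : [1, "ExampleValue", 0x010],      # single value, converted to integer
    'XL' : [2, "ExampleList",  0x020],      # multiple values, time field
    'XM' : [0, "ExampleMode",  1],          # mode option
    'XA' : [4, "ExampleAction", 1],         # action option
}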
76
+
77
+ # global initial optional values
78
+ PGOPT = {
79
+ 'ACTS' : 0, # carry current action bits
80
+ 'UACTS' : 0, # carry dsarch skip check UD action bits
81
+ 'CACT' : '', # current short action name
82
+ 'IFCNT' : 0, # 1 to read a single Input File at a time
83
+ 'ANAME' : '', # cache the application name if set
84
+ 'TABLE' : '', # table name the action is on
85
+ 'UID' : 0, # user.uid
86
+ 'MSET' : 'SA', # Action for multiple sets
87
+ 'WIDTH' : 128, # max column width
88
+ 'TXTBIT' : 64, # text field bit (0x040) allowing multiple lines
89
+ 'PEMAX' : 12, # max count of request partition errors for auto reprocesses
90
+ 'REMAX' : 2, # max count of request errors for auto reprocesses
91
+ 'RSMAX' : 2000, # max count of gatherxml with options -R -S
92
+ 'RCNTL' : None, # placeholder for a request control record
93
+ 'dcm' : "dcm",
94
+ 'sdp' : "sdp",
95
+ 'rcm' : "rcm",
96
+ 'scm' : "scm",
97
+ 'wpg' : "",
98
+ 'gatherxml' : "gatherxml",
99
+ 'cosconvert' : "cosconvert",
100
+ 'emllog' : PgLOG.LGWNEM,
101
+ 'emlerr' : PgLOG.LOGERR|PgLOG.EMEROL,
102
+ 'emerol' : PgLOG.LOGWRN|PgLOG.EMEROL,
103
+ 'emlsum' : PgLOG.LOGWRN|PgLOG.EMLSUM,
104
+ 'emlsep' : PgLOG.LGWNEM|PgLOG.SEPLIN,
105
+ 'wrnlog' : PgLOG.LOGWRN,
106
+ 'errlog' : PgLOG.LOGERR,
107
+ 'extlog' : PgLOG.LGEREX,
108
+ 'PTYPE' : "CPRV",
109
+ 'WDTYP' : "ADNU",
110
+ 'HFTYP' : "DS",
111
+ 'SDTYP' : "PORWUV",
112
+ 'GXTYP' : "DP"
113
+ }
114
+
115
+ # global default parameters
116
+ params = {
117
+ 'ES' : "<=>",
118
+ 'AO' : "<!>",
119
+ 'DV' : "<:>"
120
+ }
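Based on the process_infile() parser below, these three delimiters are used in input files roughly as in the sketch here; the option names and values are placeholders rather than real dsarch/dsrqst options, and <dsid> stands for an actual dataset id.

Dataset<=><dsid>
ExampleAction<!>
LocalFile<:>ExampleList<:>
file1.dat<:>value1<:>
file2.dat<:>value2<:>

The first line is a single-value assignment (params['ES']), the second sets a mode or action option (params['AO']), and the remaining lines form a multi-value block whose header row names the options and whose data rows supply one value per column (params['DV']).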
121
+
122
+ WTYPE = {
123
+ 'A' : "ARCO",
124
+ 'D' : "DATA",
125
+ 'N' : "NCAR",
126
+ 'U' : "UNKNOWN",
127
+ }
128
+
129
+ HTYPE = {
130
+ 'D' : "DOCUMENT",
131
+ 'S' : "SOFTWARE",
132
+ 'U' : "UNKNOWN"
133
+ }
134
+
135
+ HPATH = {
136
+ 'D' : "docs",
137
+ 'S' : "software",
138
+ 'U' : "help"
139
+ }
140
+
141
+ MTYPE = {
142
+ 'P' : "PRIMARY",
143
+ 'A' : "ARCHIVING",
144
+ 'V' : "VERSION",
145
+ 'W' : "WORKING",
146
+ 'R' : "ORIGINAL",
147
+ 'B' : "BACKUP",
148
+ 'O' : "OFFSITE",
149
+ 'C' : "CHRONOPOLIS",
150
+ 'U' : "UNKNOWN"
151
+ }
152
+
153
+ STYPE = {
154
+ 'O' : "OFFLINE",
155
+ 'P' : "PRIMARY",
156
+ 'R' : "ORIGINAL",
157
+ 'V' : "VERSION",
158
+ 'W' : "WORKING",
159
+ 'U' : "UNKNOWN"
160
+ }
161
+
162
+ BTYPE = {
163
+ 'B' : "BACKUPONLY",
164
+ 'D' : "BACKDRDATA",
165
+ }
166
+
167
+ #
168
+ # process and parse input information
169
+ # aname - application name such as 'dsarch', 'dsupdt', and 'dsrqst'
170
+ #
171
+ def parsing_input(aname):
172
+
173
+ PgLOG.PGLOG['LOGFILE'] = aname + ".log"
174
+ PGOPT['ANAME'] = aname
175
+ PgDBI.dssdb_dbname()
176
+ argv = sys.argv[1:]
177
+ if not argv: PgLOG.show_usage(aname)
178
+
179
+ PgLOG.cmdlog("{} {}".format(aname, ' '.join(argv)))
180
+
181
+ # process command line options to fill option values
182
+ option = infile = None
183
+ needhelp = 0
184
+ helpopts = {}
185
+ for param in argv:
186
+ if re.match(r'^(-{0,2}help|-H)$', param, re.I):
187
+ if option: helpopts[option] = OPTS[option]
188
+ needhelp = 1
189
+ continue
190
+
191
+ ms = re.match(r'^-([a-zA-Z]\w*)$', param)
192
+ if ms: # option parameter
193
+ param = ms.group(1)
194
+ if option and not needhelp and option not in params:
195
+ val = get_default_info(option)
196
+ if val is not None:
197
+ set_option_value(option, val)
198
+ else:
199
+ parameter_error("-" + option, "missval")
200
+ option = get_option_key(param)
201
+ if needhelp:
202
+ helpopts[option] = OPTS[option]
203
+ break
204
+
205
+ # set mode/action options
206
+ if OPTS[option][0]&3 == 0: set_option_value(option)
207
+
208
+ elif option:
209
+ ms =re.match(r"^\'(.*)\'$", param)
210
+ if ms: param = ms.group(1)
211
+ set_option_value(option, param)
212
+
213
+ elif PgUtil.find_dataset_id(param):
214
+ set_option_value('DS', param)
215
+
216
+ else:
217
+ option = get_option_key(param, 3, 1)
218
+ if option:
219
+ set_option_value(option)
220
+ if needhelp:
221
+ helpopts[option] = OPTS[option]
222
+ break
223
+ elif op.exists(param): # assume input file
224
+ infile = param
225
+ else:
226
+ parameter_error(param)
227
+
228
+ if needhelp: PgLOG.show_usage(aname, helpopts)
229
+
230
+ if option and option not in params:
231
+ val = get_default_info(option)
232
+ if val is not None:
233
+ set_option_value(option, val)
234
+ else:
235
+ parameter_error("-" + option, "missval")
236
+
237
+ # check if only an input filename is given on command line following aname
238
+ if infile:
239
+ if 'IF' in params:
240
+ parameter_error(infile)
241
+ else:
242
+ params['IF'] = [infile]
243
+
244
+ # process given one or multiple input files to fill option values
245
+ if 'IF' in params:
246
+ PGOPT['IFCNT'] = 1 if PGOPT['CACT'] == 'AQ' else 0
247
+ if OPTS['DS'][0] == 1:
248
+ param = validate_infile_names(params['DS']) if 'DS' in params else 0
249
+ else:
250
+ param = 1
251
+ get_input_info(params['IF'])
252
+ if not param and 'DS' in params: validate_infile_names(params['DS'])
253
+
254
+ if not PGOPT['ACTS']: parameter_error(aname, "missact") # no action entered
255
+
256
+ if 'DB' in params:
257
+ dcnt = len(params['DB'])
258
+ for i in range(dcnt):
259
+ if i == 0:
260
+ PgLOG.PGLOG['DBGLEVEL'] = params['DB'][0]
261
+ elif i == 1:
262
+ PgLOG.PGLOG['DBGPATH'] = params['DB'][1]
263
+ elif i == 2:
264
+ PgLOG.PGLOG['DBGFILE'] = params['DB'][2]
265
+ PgLOG.pgdbg(PgLOG.PGLOG['DBGLEVEL'])
266
+
267
+ if 'GZ' in params: PgLOG.PGLOG['GMTZ'] = PgUtil.diffgmthour()
268
+ if 'BG' in params: PgLOG.PGLOG['BCKGRND'] = 1
269
+
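A minimal sketch of how an application script might drive this routine, assuming the application has already populated OPTS, ALIAS, and TBLHASH with its own definitions and that an RDADB connection is available; the application name and option entries below are hypothetical.

import PgOPT

PgOPT.OPTS['XA'] = [4, "ExampleAction", 1]   # hypothetical action option
PgOPT.OPTS['DS'] = [1, "Dataset", 0]         # hypothetical dataset option

PgOPT.parsing_input("dsexample")   # fill PgOPT.params from argv and any input files
PgOPT.set_uid("dsexample")         # resolve user.uid and open the output handle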
270
+ #
271
+ # check and get default value for info option, return None if not available
272
+ #
273
+ def get_default_info(opt):
274
+
275
+ olist = OPTS[opt]
276
+ if olist[0]&3 and len(olist) > 3:
277
+ odval = olist[3]
278
+ if not odval or isinstance(odval, int):
279
+ return odval
280
+ else:
281
+ return odval[0] # return the first char of a default string
282
+
283
+ return None
284
+
285
+ #
286
+ # set up the output file handle
287
+ #
288
+ def open_output(outfile = None):
289
+
290
+ global OUTPUT
291
+
292
+ if outfile: # result output file
293
+ try:
294
+ OUTPUT = open(outfile, 'w')
295
+ except Exception as e:
296
+ PgLOG.pglog("{}: Error open file to write - {}".format(outfile, str(e)), PGOPT['extlog'])
297
+ else: # result to STDOUT
298
+ OUTPUT = sys.stdout
299
+
300
+ #
301
+ # return 1 if valid infile names; sys.exit(1) otherwise
302
+ #
303
+ def validate_infile_names(dsid):
304
+
305
+ i = 0
306
+ for infile in params['IF']:
307
+ if not validate_one_infile(infile, dsid): return PgLOG.FAILURE
308
+ i += 1
309
+ if PGOPT['IFCNT'] and i >= PGOPT['IFCNT']: break
310
+
311
+ return i
312
+
313
+ #
314
+ # validate an input filename against dsid
315
+ #
316
+ def validate_one_infile(infile, dsid):
317
+
318
+ ndsid = PgUtil.find_dataset_id(infile)
319
+ if ndsid is None:
320
+ return PgLOG.pglog("{}: No dsid identified in Input file name {}!".format(dsid, infile), PGOPT['extlog'])
321
+
322
+ fdsid = PgUtil.format_dataset_id(ndsid)
323
+ if fdsid != dsid:
324
+ return PgLOG.pglog("{}: Different dsid {} found in Input file name {}!".format(dsid, fdsid, infile), PGOPT['extlog'])
325
+
326
+ return PgLOG.SUCCESS
327
+
328
+ #
329
+ # gather input information from input files
330
+ #
331
+ def get_input_info(infiles, table = None):
332
+
333
+ i = 0
334
+ for file in infiles:
335
+ i += process_infile(file, table)
336
+ if not PGOPT['IFCNT'] and PGOPT['CACT'] == 'AQ': PGOPT['IFCNT'] = 1
337
+ if PGOPT['IFCNT']: break
338
+
339
+ return i
340
+
341
+ #
342
+ # validate and get info from a single input file
343
+ #
344
+ def read_one_infile(infile):
345
+
346
+ dsid = params['DS']
347
+ del params['DS']
348
+ if OPTS['DS'][2]&2: OPTS['DS'][2] &= ~2
349
+ if 'DS' in CMDOPTS: del CMDOPTS['DS']
350
+ clean_input_values()
351
+ process_infile(infile)
352
+ if 'DS' in params: dsid = params['DS']
353
+ if dsid: validate_one_infile(infile, dsid)
354
+
355
+ return dsid
356
+
357
+ #
358
+ # gather input option values from one input file
359
+ #
360
+ # return 0 if nothing retrieved when table is not null
361
+ #
362
+ def process_infile(infile, table = None):
363
+
364
+ if not op.exists(infile): PgLOG.pglog(infile + ": Input file does not exist", PGOPT['extlog'])
365
+ if table:
366
+ PgLOG.pglog("Gather '{}' information from input file '{}'..." .format(table, infile), PGOPT['wrnlog'])
367
+ else:
368
+ PgLOG.pglog("Gather information from input file '{}'...".format(infile), PGOPT['wrnlog'])
369
+
370
+ try:
371
+ fd = open(infile, 'r')
372
+ except Exception as e:
373
+ PgLOG.pglog("{}: Error Open input file - {}!".format(infile, str(e)), PGOPT['extlog'])
374
+ else:
375
+ lines = fd.readlines()
376
+ fd.close()
377
+
378
+ opt = None
379
+ columns = []
380
+ chktbl = 1 if table else -1
381
+ mpes = r'^(\w+)\s*{}\s*(.*)$'.format(params['ES'])
382
+ mpao = r'^(\w+)\s*{}'.format(params['AO'])
383
+ # column count, column index, value count, value index, line index, option-set count, end divider flag
384
+ colcnt = colidx = valcnt = validx = linidx = setcnt = enddiv = 0
385
+ for line in lines:
386
+ linidx += 1
387
+ if linidx%50000 == 0:
388
+ PgLOG.pglog("{}: {} lines read".format(infile, linidx), PGOPT['wrnlog'])
389
+ if 'NT' not in params: line = PgLOG.pgtrim(line, 2)
390
+ if not line:
391
+ if opt: set_option_value(opt, '', 1, linidx, line, infile)
392
+ continue # skip empty lines
393
+ if chktbl > 0:
394
+ if re.match(r'^\[{}\]$'.format(table), line, re.I): # found entry for table
395
+ chktbl = 0
396
+ clean_input_values() # clean previously saved input values
397
+ continue
398
+ else:
399
+ ms = re.match(r'^\[(\w+)\]$', line)
400
+ if ms:
401
+ if chktbl == 0: break # stop at next sub-title
402
+ if not PGOPT['MSET']:
403
+ input_error(linidx, line, infile, ms.group(1) + ": Cannot process sub-title")
404
+ elif PGOPT['CACT'] != PGOPT['MSET']:
405
+ input_error(linidx, line, infile, "Use Action -{} to Set multiple sub-titles".format(PGOPT['MSET']))
406
+ break # stop getting info if no table given or a different table
407
+
408
+ if colcnt == 0: # check single value and action lines first
409
+ ms = re.match(mpes, line)
410
+ if ms: # one value assignment
411
+ key = ms.group(1).strip()
412
+ val = ms.group(2)
413
+ if val and 'NT' not in params: val = val.strip()
414
+ opt = get_option_key(key, 1, 0, linidx, line, infile, table)
415
+ set_option_value(opt, val, 0, linidx, line, infile)
416
+ if not OPTS[opt][2]&PGOPT['TXTBIT']: opt = None
417
+ setcnt += 1
418
+ continue
419
+
420
+ ms = re.match(mpao, line)
421
+ if ms: # set mode or action option
422
+ key = get_option_key(ms.group(1).strip(), 4, 0, linidx, line, infile, table)
423
+ set_option_value(key, '', 0, linidx, line, infile)
424
+ setcnt += 1
425
+ continue
426
+
427
+ # check multiple value assignment for one or more multi-value options
428
+ values = line.split(params['DV'])
429
+ valcnt = len(values)
430
+ if colcnt == 0:
431
+ while colcnt < valcnt:
432
+ key = values[colcnt].strip()
433
+ if not key: break
434
+ opt = get_option_key(key, 2, 1, linidx, line, infile, table)
435
+ if not opt: break
436
+ columns.append(opt)
437
+ if opt in params: del params[opt]
438
+ colcnt += 1
439
+ if colcnt < valcnt:
440
+ if colcnt == (valcnt-1):
441
+ enddiv = 1
442
+ else:
443
+ input_error(linidx, line, infile, "Multi-value Option Name missed for column {}".format(colcnt+1))
444
+ opt = None
445
+ continue
446
+
447
+ elif valcnt == 1:
448
+ if re.match(mpes, line):
449
+ input_error(linidx, line, infile, "Cannot set single value option after Multi-value Options")
450
+ elif re.match(mpao, line):
451
+ input_error(linidx, line, infile, "Cannot set acttion/mode option after Multi-value Options")
452
+
453
+ if opt: # add to multiple-line value
454
+ val = values.pop(0)
455
+ valcnt -= 1
456
+ if val and 'NT' not in params: val = val.strip()
457
+ set_option_value(opt, val, 1, linidx, line, infile)
458
+ setcnt += 1
459
+ if valcnt == 0: continue # continue to check multiple line value
460
+ colidx += 1
461
+ opt = None
462
+
463
+ reduced = 0
464
+ valcnt += colidx
465
+ if valcnt > colcnt:
466
+ if enddiv:
467
+ val = values.pop()
468
+ if not val.strip():
469
+ valcnt -= 1
470
+ reduced = 1
471
+ if valcnt > colcnt:
472
+ input_error(linidx, line, infile, "Too many values({}) provided for {} columns".format(valcnt+colidx, colcnt))
473
+
474
+ if values:
475
+ for val in values:
476
+ opt = columns[colidx]
477
+ colidx += 1
478
+ if val and 'NT' not in params: val = val.strip()
479
+ set_option_value(opt, val, 0, linidx, line, infile)
480
+ setcnt += 1
481
+ colidx += (reduced-enddiv)
482
+
483
+ if colidx == colcnt:
484
+ colidx = 0 # done with gathering values of a multi-value line
485
+ opt = None
486
+ elif opt and not OPTS[opt][2]&PGOPT['TXTBIT']:
487
+ colidx += 1
488
+ opt = None
489
+
490
+ if setcnt > 0:
491
+ if colidx:
492
+ if colidx < colcnt:
493
+ input_error(linidx, '', infile, "{} of {} values missed".format(colcnt-colidx, colcnt))
494
+ elif enddiv:
495
+ input_error(linidx, '', infile, "Miss end divider '{}'".format(params['DV']))
496
+ return 1 # read something
497
+ else:
498
+ if table: PgLOG.pglog("No option information found for '{}'".format(table), PgLOG.WARNLG)
499
+ return 0 # read nothing
500
+
501
+ #
502
+ # clean params for input option values when setting multiple tables
503
+ #
504
+ def clean_input_values():
505
+
506
+ global INOPTS
507
+ # clean previously saved input values if any
508
+ for opt in INOPTS:
509
+ del params[opt]
510
+ INOPTS = {}
511
+
512
+ #
513
+ # build a hash record for add or update of a table record
514
+ #
515
+ def build_record(flds, pgrec, tname, idx = 0):
516
+
517
+ record = {}
518
+ if not flds: return record
519
+
520
+ hash = TBLHASH[tname]
521
+
522
+ for key in flds:
523
+ if key not in hash: continue
524
+ opt = hash[key][0]
525
+ field = hash[key][3] if len(hash[key]) == 4 else hash[key][1]
526
+ ms = re.search(r'\.(.+)$', field)
527
+ if ms: field = ms.group(1)
528
+ if opt in params:
529
+ if OPTS[opt][0] == 1:
530
+ val = params[opt]
531
+ else:
532
+ if OPTS[opt][2]&2 and pgrec and field in pgrec and pgrec[field]: continue
533
+ val = params[opt][idx]
534
+ sval = pgrec[field] if pgrec and field in pgrec else None
535
+ if sval is None:
536
+ if val == '': val = None
537
+ elif isinstance(sval, int):
538
+ if isinstance(val, str): val = (int(val) if val else None) # change '' to None for int
539
+ if PgUtil.pgcmp(sval, val, 1): record[field] = val # record new or changed value
540
+
541
+ return record
542
+
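The field helpers in this module index TBLHASH[tname] by single-letter field keys, each mapping to [short option, database field, condition flag, optional prefix]; the hypothetical entry below only illustrates how build_record() and the get_*_fields() helpers read such a hash.

# hypothetical table hash, not taken from any real application
TBLHASH['example_table'] = {
    'I' : ['XI', 'example_table.exampleid', 0],   # option, db field, condition flag
    'N' : ['XN', 'examplename', 1],
}
# build_record('IN', oldrec, 'example_table') would then return a dict keyed by
# 'exampleid' and 'examplename' holding any new or changed -XI/-XN values.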
543
+ #
544
+ # set global variable PGOPT['UID'] with value of user.uid, fatal if unsuccessful
545
+ #
546
+ def set_uid(aname):
547
+
548
+ set_email_logact()
549
+
550
+ if 'LN' not in params:
551
+ params['LN'] = PgLOG.PGLOG['CURUID']
552
+ elif params['LN'] != PgLOG.PGLOG['CURUID']:
553
+ params['MD'] = 1 # make sure this set if running as another user
554
+ if 'NE' not in params: PgLOG.PGLOG['EMLADDR'] = params['LN']
555
+ if 'DM' in params and re.match(r'^(start|begin)$', params['DM'], re.I):
556
+ msg = "'{}' must start Daemon '{} -{}' as '{}'".format(PgLOG.PGLOG['CURUID'], aname, PGOPT['CACT'], params['LN'])
557
+ else:
558
+ msg = "'{}' runs '{} -{}' as '{}'!".format(PgLOG.PGLOG['CURUID'], aname, PGOPT['CACT'], params['LN'])
559
+ PgLOG.pglog(msg, PGOPT['wrnlog'])
560
+ PgLOG.set_specialist_environments(params['LN'])
561
+
562
+ if 'LN' not in params: PgLOG.pglog("Could not get user login name", PGOPT['extlog'])
563
+
564
+ validate_dataset()
565
+ if OPTS[PGOPT['CACT']][2] > 0: validate_dsowner(aname)
566
+
567
+ pgrec = PgDBI.pgget("dssdb.user", "uid", "logname = '{}' AND until_date IS NULL".format(params['LN']), PGOPT['extlog'])
568
+ if not pgrec: PgLOG.pglog("Could not get user.uid for " + params['LN'], PGOPT['extlog'])
569
+ PGOPT['UID'] = pgrec['uid']
570
+
571
+ open_output(params['OF'] if 'OF' in params else None)
572
+
573
+ #
574
+ # set global variable PGOPT['UID'] as 0 for a sudo user
575
+ #
576
+ def set_sudo_uid(aname, uid):
577
+
578
+ set_email_logact()
579
+
580
+ if PgLOG.PGLOG['CURUID'] != uid:
581
+ if 'DM' in params and re.match(r'^(start|begin)$', params['DM'], re.I):
582
+ msg = "'{}': must start Daemon '{} -{} as '{}'".format(PgLOG.PGLOG['CURUID'], aname, params['CACT'], uid)
583
+ else:
584
+ msg = "'{}': must run '{} -{}' as '{}'".format(PgLOG.PGLOG['CURUID'], aname, params['CACT'], uid)
585
+ PgLOG.pglog(msg, PGOPT['extlog'])
586
+
587
+ PGOPT['UID'] = 0
588
+ params['LN'] = PgLOG.PGLOG['CURUID']
589
+
590
+ #
591
+ # set global variable PGOPT['UID'] as 0 for root user
592
+ #
593
+ def set_root_uid(aname):
594
+
595
+ set_email_logact()
596
+
597
+ if PgLOG.PGLOG['CURUID'] != "root":
598
+ if 'DM' in params and re.match(r'^(start|begin)$', params['DM'], re.I):
599
+ msg = "'{}': you must start Daemon '{} -{} as 'root'".format(PgLOG.PGLOG['CURUID'], aname, params['CACT'])
600
+ else:
601
+ msg = "'{}': you must run '{} -{}' as 'root'".format(PgLOG.PGLOG['CURUID'], aname, params['CACT'])
602
+ PgLOG.pglog(msg, PGOPT['extlog'])
603
+
604
+ PGOPT['UID'] = 0
605
+ params['LN'] = PgLOG.PGLOG['CURUID']
606
+
607
+ #
608
+ # set email logging bits
609
+ #
610
+ def set_email_logact():
611
+
612
+ if 'NE' in params:
613
+ PgLOG.PGLOG['LOGMASK'] &= ~PgLOG.EMLALL # remove all email bits
614
+ elif 'SE' in params:
615
+ PgLOG.PGLOG['LOGMASK'] &= ~PgLOG.EMLLOG # no normal email
616
+
617
+ #
618
+ # validate dataset owner
619
+ #
620
+ # return: 0 or fatal if not valid, 1 if valid, -1 if can not be validated
621
+ #
622
+ def validate_dsowner(aname, dsid = None, logname = None, pgds = 0, logact = 0):
623
+
624
+ if not logname: logname = (params['LN'] if 'LN' in params else PgLOG.PGLOG['CURUID'])
625
+ if logname == PgLOG.PGLOG['RDAUSER']: return 1
626
+
627
+ dsids = {}
628
+ if dsid:
629
+ dsids[dsid] = 1
630
+ elif 'DS' in params:
631
+ if OPTS['DS'][0] == 2:
632
+ for dsid in params['DS']:
633
+ dsids[dsid] = 1
634
+ else:
635
+ dsids[params['DS']] = 1
636
+ else:
637
+ return -1
638
+
639
+ if not pgds and 'MD' in params: pgds = 1
640
+ if not logact: logact = PGOPT['extlog']
641
+
642
+ for dsid in dsids:
643
+ if not PgDBI.pgget("dsowner", "", "dsid = '{}' AND specialist = '{}'".format(dsid, logname), PGOPT['extlog']):
644
+ if not PgDBI.pgget("dssgrp", "", "logname = '{}'".format(logname), PGOPT['extlog']):
645
+ return PgLOG.pglog("'{}' is not DSS Specialist!".format(logname), logact)
646
+ elif not pgds:
647
+ return PgLOG.pglog("'{}' not listed as Specialist of '{}'\nRun '{}' with Option -MD!".format(logname, dsid, aname), logact)
648
+
649
+ return 1
650
+
651
+ #
652
+ # validate dataset
653
+ #
654
+ def validate_dataset():
655
+
656
+ cnt = 1
657
+ if 'DS' in params:
658
+ if OPTS['DS'][0] == 2:
659
+ for dsid in params['DS']:
660
+ cnt = PgDBI.pgget("dataset", "", "dsid = '{}'".format(dsid), PGOPT['extlog'])
661
+ if cnt == 0: break
662
+ else:
663
+ dsid = params['DS']
664
+ cnt = PgDBI.pgget("dataset", "", "dsid = '{}'".format(dsid), PGOPT['extlog'])
665
+
666
+ if not cnt: PgLOG.pglog(dsid + " does not exist in RDADB!", PGOPT['extlog'])
667
+
668
+ #
669
+ # validate given group indices or group names
670
+ #
671
+ def validate_groups(parent = 0):
672
+
673
+ if parent:
674
+ gi = 'PI'
675
+ gn = 'PN'
676
+ else:
677
+ gi = 'GI'
678
+ gn = 'GN'
679
+ if (OPTS[gi][2]&8): return # already validated
680
+
681
+ dcnd = "dsid = '{}'".format(params['DS'])
682
+ if gi in params:
683
+ grpcnt = len(params[gi])
684
+ i = 0
685
+ while i < grpcnt:
686
+ gidx = params[gi][i]
687
+ if not isinstance(gidx, int) and re.match(r'^(!|<|>|<>)$', gidx): break
688
+ i += 1
689
+ if i >= grpcnt: # normal group index given
690
+ for i in range(grpcnt):
691
+ gidx = params[gi][i]
692
+ gidx = int(gidx) if gidx else 0
693
+ params[gi][i] = gidx
694
+ if gidx == 0 or (i > 0 and gidx == params[gi][i-1]): continue
695
+ if not PgDBI.pgget("dsgroup", '', "{} AND gindex = {}".format(dcnd, gidx), PGOPT['extlog']):
696
+ if i > 0 and parent and params['GI']:
697
+ j = 0
698
+ while j < i:
699
+ if gidx == params['GI'][j]: break
700
+ j += 1
701
+ if j < i: continue
702
+ PgLOG.pglog("Group Index {} not in RDADB for {}".format(gidx, params['DS']), PGOPT['extlog'])
703
+ else: # found non-equal condition sign
704
+ pgrec = PgDBI.pgmget("dsgroup", "DISTINCT gindex", dcnd + PgDBI.get_field_condition("gindex", params[gi]), PGOPT['extlog'])
705
+ grpcnt = (len(pgrec['gindex']) if pgrec else 0)
706
+ if grpcnt == 0:
707
+ PgLOG.pglog("No Group matches given Group Index condition for " + params['DS'], PGOPT['extlog'])
708
+
709
+ params[gi] = pgrec['gindex']
710
+ elif gn in params:
711
+ params[gi] = group_id_to_index(params[gn])
712
+
713
+ OPTS[gi][2] |= 8 # set validated flag
714
+
715
+ #
716
+ # get group index array from given group IDs
717
+ #
718
+ def group_id_to_index(grpids):
719
+
720
+ count = len(grpids) if grpids else 0
721
+ if count == 0: return None
722
+
723
+ indices = []
724
+ dcnd = "dsid = '{}'".format(params['DS'])
725
+ i = 0
726
+ while i < count:
727
+ gid = grpids[i]
728
+ if gid and (re.match(r'^(!|<|>|<>)$', gid) or gid.find('%') > -1): break
729
+ i += 1
730
+ if i >= count: # normal group id given
731
+ for i in range(count):
732
+ gid = grpids[i]
733
+ if not gid:
734
+ indices.append(0)
735
+ elif i and gid == grpids[i-1]:
736
+ indices.append(indices[i-1])
737
+ else:
738
+ pgrec = PgDBI.pgget("dsgroup", "gindex", "{} AND grpid = '{}'".format(dcnd, gid), PGOPT['extlog'])
739
+ if not pgrec: PgLOG.pglog("Group ID {} not in RDADB for {}".format(gid, params['DS']), PGOPT['extlog'])
740
+ indices.append(pgrec['gindex'])
741
+ return indices
742
+ else: # found wildcard and/or non-equal condition sign
743
+ pgrec = PgDBI.pgmget("dsgroup", "DISTINCT gindex", dcnd + PgDBI.get_field_condition("grpid", grpids, 1), PGOPT['extlog'])
744
+ count = (len(pgrec['gindex']) if pgrec else 0)
745
+ if count == 0: PgLOG.pglog("No Group matches given Group ID condition for " + params['DS'], PGOPT['extlog'])
746
+ return pgrec['gindex']
747
+
748
+ #
749
+ # get group ID array from given group indices
750
+ #
751
+ def group_index_to_id(indices):
752
+
753
+ count = len(indices) if indices else 0
754
+ if count == 0: return None
755
+
756
+ grpids = []
757
+ dcnd = "dsid = '{}'".format(params['DS'])
758
+ i = 0
759
+ while i < count:
760
+ gidx = indices[i]
761
+ if not isinstance(gidx, int) and re.match(r'^(!|<|>|<>)$', gidx): break
762
+ i += 1
763
+ if i >= count: # normal group index given
764
+ for i in range(count):
765
+ gidx = indices[i]
766
+ if not gidx:
767
+ grpids.append('') # default value
768
+ elif i and gidx == indices[i-1]:
769
+ grpids.append(grpids[i-1])
770
+ else:
771
+ pgrec = PgDBI.pgget("dsgroup", "grpid", "{} AND gindex = {}".format(dcnd, gidx), PGOPT['extlog'])
772
+ if not pgrec: PgLOG.pglog("Group Index {} not in RDADB for {}".format(gidx, params['DS']), PGOPT['extlog'])
773
+ grpids.append(pgrec['grpid'])
774
+ return grpids
775
+ else: # found non-equal condition sign
776
+ pgrec = PgDBI.pgmget("dsgroup", "DISTINCT grpid", dcnd + PgDBI.get_field_condition("gindex", indices), PGOPT['extlog'])
777
+ count = (len(pgrec['grpid']) if pgrec else 0)
778
+ if count == 0: PgLOG.pglog("No Group matches given Group Index condition for " + params['DS'], PGOPT['extlog'])
779
+ return pgrec['grpid']
780
+
781
+ #
782
+ # validate order fields and
783
+ # get a string of order fields that are not in the given fields
784
+ #
785
+ def append_order_fields(oflds, flds, tname, excludes = None):
786
+
787
+ orders = ''
788
+ hash = TBLHASH[tname]
789
+ for ofld in oflds:
790
+ ufld = ofld.upper()
791
+ if ufld not in hash or excludes and excludes.find(ufld) > -1: continue
792
+ if flds and flds.find(ufld) > -1: continue
793
+ orders += ofld
794
+
795
+ return orders
796
+
797
+ #
798
+ # validate mutiple values for given fields
799
+ #
800
+ def validate_multiple_values(tname, count, flds = None):
801
+
802
+ opts = []
803
+ hash = TBLHASH[tname]
804
+ if flds:
805
+ for fld in flds:
806
+ if fld in hash: opts.append(hash[fld][0])
807
+ else:
808
+ for fld in hash:
809
+ opts.append(hash[fld][0])
810
+
811
+ validate_multiple_options(count, opts, (1 if tname == 'htarfile' else 0))
812
+
813
+ #
814
+ # validate multiple values for given options
815
+ #
816
+ def validate_multiple_options(count, opts, remove = 0):
817
+
818
+ for opt in opts:
819
+ if opt not in params or OPTS[opt][0] != 2: continue # no value given or not multiple value option
820
+ cnt = len(params[opt])
821
+ if cnt == 1 and count > 1 and OPTS[opt][2]&1:
822
+ val0 = params[opt][0]
823
+ params[opt] = [val0]*count
824
+ OPTS[opt][2] |= 4 # expanded
825
+ cnt = count
826
+ if cnt != count:
827
+ if count == 1 and cnt > 1 and OPTS[opt][2]&PGOPT['TXTBIT']:
828
+ params[opt][0] = ' '.join(params[opt])
829
+ elif remove and cnt == 1 and count > 1:
830
+ del params[opt]
831
+ elif cnt < count:
832
+ PgLOG.pglog("Multi-value Option {}({}): {} Given and {} needed".format(opt, OPTS[opt][1], cnt, count), PGOPT['extlog'])
833
+
834
+ #
835
+ # get field keys for a RDADB table; include all if 'include' is not given
836
+ #
837
+ def get_field_keys(tname, include = None, exclude = None):
838
+
839
+ fields = ''
840
+ hash = TBLHASH[tname]
841
+
842
+ for fld in hash:
843
+ if include and include.find(fld) < 0: continue
844
+ if exclude and exclude.find(fld) > -1: continue
845
+ opt = hash[fld][0]
846
+ if opt in params: fields += fld
847
+
848
+ return fields if fields else None
849
+
850
+ #
851
+ # get a string for fields of a RDADB table
852
+ #
853
+ def get_string_fields(flds, tname, include = None, exclude = None):
854
+
855
+ fields = []
856
+ hash = TBLHASH[tname]
857
+
858
+ for fld in flds:
859
+ ufld = fld.upper() # in case
860
+ if include and include.find(ufld) < 0: continue
861
+ if exclude and exclude.find(ufld) > -1: continue
862
+ if ufld not in hash:
863
+ PgLOG.pglog("Invalid field '{}' to get from '{}'".format(fld, tname), PGOPT['extlog'])
864
+ elif hash[ufld][0] not in OPTS:
865
+ PgLOG.pglog("Option '{}' is not defined for field '{} - {}'".format(hash[ufld][0], ufld, hash[ufld][1]), PGOPT['extlog'])
866
+ if len(hash[ufld]) == 4:
867
+ fname = "{} {}".format(hash[ufld][3], hash[ufld][1])
868
+ else:
869
+ fname = hash[ufld][1]
870
+ fields.append(fname)
871
+
872
+ return ', '.join(fields)
873
+
874
+ #
875
+ # get max count for given options
876
+ #
877
+ def get_max_count(opts):
878
+
879
+ count = 0
880
+ for opt in opts:
881
+ if opt not in params: continue
882
+ cnt = len(params[opt])
883
+ if cnt > count: count = cnt
884
+
885
+ return count
886
+
887
+ #
888
+ # get a string of fields of a RDADB table for sorting
889
+ #
890
+ def get_order_string(flds, tname, exclude = None):
891
+
892
+ orders = []
893
+ hash = TBLHASH[tname]
894
+
895
+ for fld in flds:
896
+ if fld.islower():
897
+ desc = " DESC"
898
+ fld = fld.upper()
899
+ else:
900
+ desc = ""
901
+ if exclude and exclude.find(fld) > -1: continue
902
+ orders.append(hash[fld][1] + desc)
903
+
904
+ return (" ORDER BY " + ', '.join(orders)) if orders else ''
905
+
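Given the case convention above (an upper-case field key sorts ascending, a lower-case one descending), a call against the hypothetical 'example_table' hash sketched earlier would behave like this:

# get_order_string('In', 'example_table')
#   -> " ORDER BY exampleid, examplename DESC"
# get_order_string('', 'example_table')
#   -> ""   (no order clause when no fields are given)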
906
+ #
907
+ # get a string for column titles of a given table
908
+ #
909
+ def get_string_titles(flds, hash, lens):
910
+
911
+ titles = []
912
+ colcnt = len(flds)
913
+ for i in range(colcnt):
914
+ fld = flds[i]
915
+ if fld not in hash: continue
916
+ opt = hash[fld][0]
917
+ if opt not in OPTS: PgLOG.pglog("ERROR: Undefined option " + opt, PGOPT['extlog'])
918
+ title = OPTS[opt][1]
919
+ if lens:
920
+ if len(title) > lens[i]: title = opt
921
+ title = "{:{}}".format(title, lens[i])
922
+ titles.append(title)
923
+
924
+ return params['DV'].join(titles) + params['DV']
925
+
926
+ #
927
+ # display error message and exit
928
+ #
929
+ def parameter_error(p, opt = None, lidx = 0, line = 0, infile = None):
930
+
931
+ if not opt:
932
+ errmsg = "value passed in without leading info option"
933
+ elif opt == "continue":
934
+ errmsg = "error processing input file on continue Line"
935
+ elif opt == 'specified':
936
+ errmsg = "option -{}/-{} is specified already".format(p, OPTS[p][1])
937
+ elif opt == "mixed":
938
+ errmsg = "single-value option mixed with multi-value option"
939
+ elif opt == "missact":
940
+ errmsg = "No Action Option is specified"
941
+ elif opt == "missval":
942
+ errmsg = "No value provided following Info Option"
943
+ elif opt == 'duplicate':
944
+ errmsg = "multiple actions not allowed"
945
+ elif opt == "delayed":
946
+ errmsg = "delayed Mode option not supported"
947
+ elif OPTS[opt][0] == 0:
948
+ errmsg = "value follows Mode Option -{}/-{}".format(opt, OPTS[opt][1])
949
+ elif OPTS[opt][0] == 1:
950
+ errmsg = "multiple values follow single-value Option -{}/-{}".format(opt, OPTS[opt][1])
951
+ elif OPTS[opt][0] >= 4:
952
+ errmsg = "value follows Action Option -{}/-{}".format(opt, OPTS[opt][1])
953
+ else:
954
+ errmsg = None
955
+
956
+ if errmsg:
957
+ if lidx:
958
+ input_error(lidx, line, infile, "{} - {}".format(p, errmsg))
959
+ else:
960
+ PgLOG.pglog("ERROR: {} - {}".format(p, errmsg), PGOPT['extlog'])
961
+
962
+ #
963
+ # wrapper around PgLOG.pglog() for errors in input files
964
+ #
965
+ def input_error(lidx, line, infile, errmsg):
966
+
967
+ PgLOG.pglog("ERROR at {}({}): {}\n {}".format(infile, lidx, line, errmsg), PGOPT['extlog'])
968
+
969
+ #
970
+ # wrapper around PgLOG.pglog() for action errors
971
+ #
972
+ def action_error(errmsg, cact = None):
973
+
974
+ msg = "ERROR"
975
+ if PGOPT['ANAME']: msg += " " + PGOPT['ANAME']
976
+ if not cact: cact = PGOPT['CACT']
977
+ if cact: msg += " for Action {} ({})".format(cact, OPTS[cact][1])
978
+
979
+ if 'DS' in params:
980
+ if OPTS['DS'][0] == 1:
981
+ msg += " of " + params['DS']
982
+ elif OPTS['DS'][0] == 2 and len(params['DS']) == 1:
983
+ msg += " of " + params['DS'][0]
984
+
985
+ msg += ": " + errmsg
986
+ if PgLOG.PGLOG['DSCHECK']: PgDBI.record_dscheck_error(msg)
987
+ PgLOG.pglog(msg, PGOPT['extlog'])
988
+
989
+ #
990
+ # get the valid option for given parameter by checking if the given option
991
+ # name matches either a valid option key (short name) or its long name
992
+ # flag: 1 - value key only, 2 - multi-value key only, 3 - action key only,
993
+ # 4 - mode&action key only
994
+ #
995
+ def get_option_key(p, flag = 0, skip = 0, lidx = 0, line = None, infile = None, table = None):
996
+
997
+ if p is None: p = ''
998
+ opt = get_short_option(p)
999
+ errmsg = None
1000
+ if opt:
1001
+ if flag == 1:
1002
+ if OPTS[opt][0]&3 == 0: errmsg = "NOT a Value Option"
1003
+ elif flag == 2:
1004
+ if OPTS[opt][0]&2 == 0: errmsg = "NOT a Multi-Value Option"
1005
+ elif flag == 3:
1006
+ if OPTS[opt][0] < 4:
1007
+ if lidx:
1008
+ errmsg = "NOT an Action Option"
1009
+ else:
1010
+ errmsg = "Miss leading '-' for none action option"
1011
+ elif flag == 4:
1012
+ if OPTS[opt][0]&3:
1013
+ errmsg = "NOT a Mode/Action Option"
1014
+ if errmsg: errmsg = "{}({}) - {}".format(opt, OPTS[opt][1], errmsg)
1015
+ elif not skip:
1016
+ if p:
1017
+ errmsg = "-{} - Unknown Option".format(p)
1018
+ else:
1019
+ errmsg = "'' - Empty Option Name"
1020
+
1021
+ if errmsg:
1022
+ if lidx:
1023
+ input_error(lidx, line, infile, errmsg)
1024
+ else:
1025
+ PgLOG.pglog("ERROR: " + errmsg, PGOPT['extlog'])
1026
+ elif opt and (table or PGOPT['IFCNT'] and OPTS[opt][0] == 2):
1027
+ INOPTS[opt] = 1
1028
+
1029
+ return opt
1030
+
1031
+ #
1032
+ # set values to given options, ignore options set in input files if the options
1033
+ # are already set on the command line
1034
+ #
1035
+ def set_option_value(opt, val = None, cnl = 0, lidx = 0, line = None, infile = None):
1036
+
1037
+ if opt in CMDOPTS and lidx: # in input file, but given on command line already
1038
+ if opt not in params: params[opt] = CMDOPTS[opt]
1039
+ return
1040
+
1041
+ if val is None: val = ''
1042
+ if OPTS[opt][0]&3:
1043
+ if OPTS[opt][2]&16:
1044
+ if not val:
1045
+ val = 0
1046
+ elif re.match(r'^\d+$', val):
1047
+ val = int(val)
1048
+ elif val and (opt == 'DS' or opt == 'OD'):
1049
+ val = PgUtil.format_dataset_id(val)
1050
+
1051
+ errmsg = None
1052
+ if not cnl and OPTS[opt][0]&3:
1053
+ if opt in params:
1054
+ if OPTS[opt][0] == 2:
1055
+ if OPTS[opt][2]&2: del params[opt] # clean auto set values
1056
+ elif params[opt] != val and not OPTS[opt][2]&1:
1057
+ errmsg = "'{}', multiple values not allowed for Single-Value Option".format(val)
1058
+ if not errmsg and (not PGOPT['CACT'] or OPTS[PGOPT['CACT']][2]):
1059
+ dstr = OPTS[opt][3] if len(OPTS[opt]) > 3 else None
1060
+ if dstr:
1061
+ vlen = len(val)
1062
+ ms = re.match(r'^!(\w*)', dstr)
1063
+ if ms:
1064
+ dstr = ms.group(1)
1065
+ if vlen == 1 and dstr.find(val) > -1: errmsg = "{}: character must not be one of '{}'".format(val, dstr)
1066
+ elif vlen > 1 or (vlen == 0 and not OPTS[opt][2]&128) or (vlen == 1 and dstr.find(val) < 0):
1067
+ errmsg = "{} single-letter value must be one of '{}'".format(val, dstr)
1068
+
1069
+ if not errmsg:
1070
+ if OPTS[opt][0] == 2: # multiple value option
1071
+ if opt not in params:
1072
+ params[opt] = [val] # set the first value
1073
+ if opt == 'QF' and PGOPT['ACTS'] == OPTS['DL'][0]: OPTS['FS'][3] = 'ANT'
1074
+ else:
1075
+ if cnl:
1076
+ rowidx = len(params[opt]) - 1
1077
+ if params[opt][rowidx]:
1078
+ if not re.match(r'^(DE|DI|DM|DW)$', opt):
1079
+ errmsg = "Multi-line value not allowed"
1080
+ else:
1081
+ params[opt][rowidx] += "\n" + val # multiple line value
1082
+ else:
1083
+ params[opt][rowidx] = val
1084
+ else:
1085
+ params[opt].append(val) # add next value
1086
+ elif OPTS[opt][0] == 1: # single value option
1087
+ if cnl and opt in params:
1088
+ if val: errmsg = "Multi-line value not allowed"
1089
+ elif OPTS[opt][2]&2 and PgUtil.pgcmp(params[opt], val):
1090
+ errmsg = "{}: Single-Value Info Option has value '{}' already".format(val, params[opt])
1091
+ else:
1092
+ params[opt] = val
1093
+ OPTS[opt][2] |= 2
1094
+ elif val:
1095
+ if OPTS[opt][0] == 0 and re.match(r'^(Y|N)$', val, re.I):
1096
+ params[opt] = 1 if (val == 'Y' or val == 'y') else 0
1097
+ else:
1098
+ parameter_error(val, opt, lidx, line, infile) # no value for flag or action options
1099
+ elif opt not in params:
1100
+ params[opt] = 1 # set flag or action option
1101
+ if OPTS[opt][0] > 2:
1102
+ if PGOPT['ACTS']: parameter_error(opt, "duplicate", lidx ,line, infile) # no duplicated action options
1103
+ PGOPT['ACTS'] = OPTS[opt][0] # add action bit
1104
+ PGOPT['CACT'] = opt # add action name
1105
+ if opt == "SB": PGOPT['MSET'] = opt
1106
+
1107
+ if errmsg:
1108
+ if lidx:
1109
+ input_error(lidx, line, infile, "{}({}) - {}".format(opt, OPTS[opt][1], errmsg))
1110
+ else:
1111
+ PgLOG.pglog("ERROR: {}({}) - {}".format(opt, OPTS[opt][1], errmsg), PGOPT['extlog'])
1112
+
1113
+ if not lidx: CMDOPTS[opt] = params[opt] # record options set on command lines
1114
+
1115
+ #
1116
+ # get width for a single row if in column format
1117
+ #
1118
+ def get_row_width(pgrec):
1119
+
1120
+ slen = len(params['DV'])
1121
+ width = 0
1122
+ for key in pgrec:
1123
+ wd = 0
1124
+ for val in pgrec[key]:
1125
+ if not val: continue
1126
+ if not isinstance(val, str): val = str(val)
1127
+ if key == 'note':
1128
+ vlen = val.find('\n') + 1
1129
+ else:
1130
+ vlen = 0
1131
+ if vlen < 1: vlen = len(val)
1132
+ if vlen > wd: wd = vlen # get max width of each column
1133
+
1134
+ # accumulate all column width plus length of delimiter to get row width
1135
+ if width: width += slen
1136
+ width += wd
1137
+
1138
+ return width
1139
+
1140
+ #
1141
+ # get a short option name by searching dict OPTS and ALIAS
1142
+ #
1143
+ def get_short_option(p):
1144
+
1145
+ plen = len(p)
1146
+ if plen == 2:
1147
+ p = p.upper()
1148
+ if p in OPTS: return p
1149
+
1150
+ for opt in OPTS: # get main option first
1151
+ if not PgUtil.pgcmp(OPTS[opt][1], p, 1): return opt
1152
+
1153
+ for opt in ALIAS: # then check alias option
1154
+ for key in ALIAS[opt]:
1155
+ if not PgUtil.pgcmp(key, p, 1): return opt
1156
+
1157
+ return None
1158
+
1159
+ #
1160
+ # print result in column format, with multiple values each row
1161
+ #
1162
+ def print_column_format(pgrec, flds, hash, lens, retbuf = 0):
1163
+
1164
+ rowcnt = -1
1165
+ colcnt = len(flds)
1166
+ buf = ''
1167
+ fields = []
1168
+ flens = []
1169
+ for i in range(colcnt):
1170
+ fld = flds[i]
1171
+ if fld in hash:
1172
+ fld = hash[fld][1]
1173
+ ms = re.search(r'\.(.+)$', fld)
1174
+ if ms: fld = ms.group(1)
1175
+ if fld in pgrec:
1176
+ fields.append(fld)
1177
+ flens.append((lens[i] if lens else 0))
1178
+ if rowcnt < 0: rowcnt = len(pgrec[fld])
1179
+ else:
1180
+ PgLOG.pglog(fld + ": Unkown field name", PGOPT['extlog'])
1181
+
1182
+ colcnt = len(fields)
1183
+ for i in range(rowcnt):
1184
+ offset = 0
1185
+ values = []
1186
+ for j in range(colcnt):
1187
+ fld = fields[j]
1188
+ idx = -1
1189
+ val = pgrec[fld][i]
1190
+ slen = flens[j]
1191
+ if val is None:
1192
+ val = ''
1193
+ elif isinstance(val, str):
1194
+ idx = val.find("\n")
1195
+ if idx > 0:
1196
+ val = "\n" + val
1197
+ idx = 0
1198
+ else:
1199
+ val = str(val)
1200
+ if slen:
1201
+ if idx < 0:
1202
+ val = "{:{}}".format(val, slen)
1203
+ else:
1204
+ val += "\n{:{}}".format(' ', offset)
1205
+ offset += slen
1206
+ values.append(val)
1207
+ line = params['DV'].join(values) + params['DV'] + "\n"
1208
+ if retbuf:
1209
+ buf += line
1210
+ else:
1211
+ OUTPUT.write(line)
1212
+
1213
+ return buf if retbuf else rowcnt
1214
+
1215
+ #
1216
+ # print result in row format, with single value on each row
1217
+ #
1218
+ def print_row_format(pgrec, flds, hash):
1219
+
1220
+ for fld in flds:
1221
+ if fld not in hash: continue
1222
+ line = "{}{}".format(OPTS[hash[fld][0]][1], params['ES'])
1223
+ field = hash[fld][1]
1224
+ ms = re.search(r'\.(.+)$', field)
1225
+ if ms: field = ms.group(1)
1226
+ if field in pgrec:
1227
+ value = pgrec[field]
1228
+ if value is not None: line += str(value)
1229
+ OUTPUT.write(line + "\n")
1230
+
1231
+ #
1232
+ # compress/uncompress given files and change the formats accordingly
1233
+ #
1234
+ def compress_files(files, formats, count):
1235
+
1236
+ if 'UZ' in params:
1237
+ strcmp = 'Uncompress'
1238
+ actcmp = 0
1239
+ else:
1240
+ strcmp = 'Compress'
1241
+ actcmp = 1
1242
+ fmtcnt = len(formats)
1243
+ if not fmtcnt: return files # just in case
1244
+ s = 's' if count > 1 else ''
1245
+ PgLOG.pglog("{}ing {} File{} for {} ...".format(strcmp, count, s, params['DS']), PGOPT['wrnlog'])
1246
+ cmpcnt = 0
1247
+ for i in range(count):
1248
+ fmt = formats[i] if(i < fmtcnt and formats[i]) else formats[0]
1249
+ (ofile, fmt) = PgFile.compress_local_file(files[i], fmt, actcmp, PGOPT['extlog'])
1250
+ if ofile != files[i]:
1251
+ files[i] = ofile
1252
+ cmpcnt += 1
1253
+
1254
+ PgLOG.pglog("{}/{} Files {}ed for {}".format(cmpcnt, count, strcmp, params['DS']) , PGOPT['emllog'])
1255
+
1256
+ if 'ZD' in params: del params['ZD']
1257
+ if 'UZ' in params: del params['UZ']
1258
+
1259
+ return files
1260
+
1261
+ #
1262
+ # get hash condition
1263
+ # tname - table name to identify a table hash
1264
+ # noand - 1 to not add a leading 'AND'
1265
+ #
1266
+ def get_hash_condition(tname, include = None, exclude = None, noand = 0):
1267
+
1268
+ condition = ''
1269
+ hash = TBLHASH[tname]
1270
+
1271
+ for key in hash:
1272
+ if include and include.find(key) < 0: continue
1273
+ if exclude and exclude.find(key) > -1: continue
1274
+ opt = hash[key][0]
1275
+ if opt not in params: continue # no option value
1276
+ flg = hash[key][2]
1277
+ if flg < 0: # condition is ignored for this option
1278
+ PgLOG.pglog("Condition given per Option -{} (-{}) is ignored".format(opt, OPTS[opt][1]), PGOPT['errlog'])
1279
+ continue
1280
+
1281
+ fld = hash[key][1]
1282
+ condition += PgDBI.get_field_condition(fld, params[opt], flg, noand)
1283
+ noand = 0
1284
+
1285
+ return condition
1286
+
1287
+ #
1288
+ # set default params value for given opt; empty the value if 'all' is given
1289
+ #
1290
+ def set_default_value(opt, dval = None):
1291
+
1292
+ flag = OPTS[opt][0]
1293
+ if flag&3 == 0: return # skip if not single&multiple value options
1294
+
1295
+ oval = 0
1296
+ if opt in params:
1297
+ if flag == 1:
1298
+ oval = params[opt]
1299
+ else:
1300
+ count = len(params[opt])
1301
+ if count == 1:
1302
+ oval = params[opt][0]
1303
+ elif count > 1:
1304
+ return # multiple values given already
1305
+
1306
+ if oval:
1307
+ if re.match(r'^all$', oval, re.I):
1308
+ del params[opt] # remove option value for all
1309
+ return # value given already
1310
+
1311
+ if dval:
1312
+ # set default value
1313
+ if flag == 1:
1314
+ params[opt] = dval
1315
+ else:
1316
+ params[opt] = [dval]
1317
+
1318
+ #
1319
+ # add/strip COS block for given file name and cosflg if given/not-given cosfile
1320
+ # return the file size after the conversion
1321
+ #
1322
+ def cos_convert(locfile, cosflg, cosfile = None):
1323
+
1324
+ if cosfile:
1325
+ cmd = "cosconvert -{} {} {}".format(cosflg, cosfile, locfile)
1326
+ else:
1327
+ cmd = "cosconvert -{} {}".format(cosflg.lower(), locfile)
1328
+ cosfile = locfile
1329
+
1330
+ PgLOG.pgsystem(cmd)
1331
+ info = PgFile.check_local_file(cosfile)
1332
+ if not info:
1333
+ return PgLOG.pglog("Error - " + cmd, PGOPT['errlog']) # should not happen
1334
+ else:
1335
+ return info['data_size']
1336
+
1337
+ #
1338
+ # evaluate count of values for given options
1339
+ #
1340
+ def get_option_count(opts):
1341
+
1342
+ count = 0
1343
+ for opt in opts:
1344
+ if opt in params:
1345
+ cnt = len(params[opt])
1346
+ if cnt > count: count = cnt
1347
+ if count > 0: validate_multiple_options(count, opts)
1348
+
1349
+ return count
1350
+
1351
+ #
1352
+ # gather subgroup indices recursively for given condition
1353
+ # dsid: Dataset Id
1354
+ # pidx: parent group index
1355
+ # gtype: group type if not empty (P - public groups only)
1356
+ #
1357
+ # Return: list of group indices
1358
+ #
1359
+ def get_all_subgroups(dcnd, pidx, gtype = None):
1360
+
1361
+ gidxs = [pidx]
1362
+ gflds = "gindex"
1363
+ if gtype: gflds += ", grptype"
1364
+ grecs = PgDBI.pgmget("dsgroup", gflds, "{} and pindex = {}".format(dcnd, pidx), PgLOG.LGWNEX)
1365
+ if not grecs: return gidxs
1366
+
1367
+ gcnt = len(grecs['gindex'])
1368
+ for i in range(gcnt):
1369
+ gidx = grecs['gindex'][i]
1370
+ if abs(gidx) <= abs(pidx) or gtype and grecs['grptype'][i] != gtype: continue
1371
+ subs = get_all_subgroups(dcnd, gidx, gtype)
1372
+ gidxs.extend(subs)
1373
+
1374
+ return gidxs
1375
+
1376
+ #
1377
+ # gather public subgroup indices recursively for given condition. A group index is
1378
+ # gathered only if there are data files right under it. The pidx is included too
1379
+ # if its file count is larger than zero.
1380
+ # dsid: Dataset Id
1381
+ # pidx: parent group index
1382
+ # cfld: count field (dwebcnt, nwebcnt, savedcnt)
1383
+ # pfcnt: file count for parent group index pidx (0 to skip)
1384
+ #
1385
+ # Return: list of group indices
1386
+ #
1387
+ def get_data_subgroups(dcnd, pidx, cfld, pfcnt = 0):
1388
+
1389
+ if not pfcnt: # get file count for the parent group
1390
+ pfcnt = group_file_count(dcnd, pidx, cfld)
1391
+ if not pfcnt: return None
1392
+
1393
+ gflds = "gindex, " + cfld
1394
+ gcnd = "{} AND pindex = {} AND {} > 0".format(dcnd, pidx, cfld)
1395
+ grecs = PgDBI.pgmget("dsgroup", gflds, gcnd, PgLOG.LGWNEX)
1396
+ if not grecs: return ([pidx] if pfcnt > 0 else None)
1397
+
1398
+ gcnt = len(grecs['gindex'])
1399
+ gidxs = []
1400
+ for i in range(gcnt):
1401
+ gidx = grecs['gindex'][i]
1402
+ fcnt = grecs[cfld][i]
1403
+ if fcnt == 0 or abs(gidx) <= abs(pidx): continue
1404
+ subs = get_data_subgroups(dcnd, gidx, cfld, fcnt)
1405
+ if subs: gidxs.extend(subs)
1406
+ pfcnt -= fcnt
1407
+ if pfcnt > 0: gidxs.insert(0, pidx)
1408
+
1409
+ return (gidxs if gidxs else None)
1410
+
1411
+ #
1412
+ # get group file count for given count field name
1413
+ #
1414
+ def group_file_count(cnd, gidx, cfld):
1415
+
1416
+ if gidx:
1417
+ table = "dsgroup"
1418
+ cnd += " AND gindex = {}".format(gidx)
1419
+ else:
1420
+ table = "dataset"
1421
+ pgrec = PgDBI.pgget(table, cfld, cnd)
1422
+
1423
+ return (pgrec[cfld] if pgrec else 0)
1424
+
1425
+ #
1426
+ # set file format for actions -AM/-AW from given local files
1427
+ #
1428
+ def set_file_format(count):
1429
+
1430
+ if 'LF' in params:
1431
+ files = params['LF']
1432
+ else:
1433
+ return
1434
+
1435
+ fmtcnt = 0
1436
+ fmts = [None] * count
1437
+ for i in range(count):
1438
+ fmt = PgFile.get_file_format(files[i])
1439
+ if fmt:
1440
+ fmtcnt += 1
1441
+ fmts[i] = fmt
1442
+
1443
+ if fmtcnt:
1444
+ params['AF'] = fmts
1445
+ OPTS['AF'][2] |= 2
1446
+
1447
+ #
1448
+ # get frequency information
1449
+ #
1450
+ def get_control_frequency(frequency):
1451
+
1452
+ val = nf = 0
1453
+ unit = None
1454
+ ms = re.match(r'^(\d+)([YMWDHNS])$', frequency, re.I)
1455
+ if ms:
1456
+ val = int(ms.group(1))
1457
+ unit = ms.group(2).upper()
1458
+ else:
1459
+ ms = re.match(r'^(\d+)M/(\d+)', frequency, re.I)
1460
+ if ms:
1461
+ val = int(ms.group(1))
1462
+ nf = int(ms.group(2))
1463
+ unit = 'M'
1464
+ if nf < 2 or nf > 10 or (30%nf): val = 0
1465
+
1466
+ if not val:
1467
+ if nf:
1468
+ unit = "fraction of month frequency '{}' MUST be (2,3,5,6,10)".format(frequency)
1469
+ elif unit:
1470
+ val = "frequency '{}' MUST be larger than 0".format(frequency)
1471
+ elif re.search(r'/(\d+)$', frequency):
1472
+ val = "fractional frequency '{}' for month ONLY".format(frequency)
1473
+ else:
1474
+ val = "invalid frequency '{}', unit must be (Y,M,W,D,H)".format(frequency)
1475
+ return (None, unit)
1476
+
1477
+ freq = [0]*7 # initialize the frequency list
1478
+ uidx = {'Y' : 0, 'D' : 2, 'H' : 3, 'N' : 4, 'S' : 5}
1479
+ if unit == 'M':
1480
+ freq[1] = val
1481
+ if nf: freq[6] = nf # number of fractions in a month
1482
+ elif unit == 'W':
1483
+ freq[2] = 7 * val
1484
+ elif unit in uidx:
1485
+ freq[uidx[unit]] = val
1486
+
1487
+ return (freq, unit)
1488
+
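A few worked examples of the frequency strings accepted above; the seven-element list appears to be [years, months, days, hours, minutes, seconds, fractions of a month]:

# get_control_frequency('6H')   -> ([0, 0, 0, 6, 0, 0, 0], 'H')    every 6 hours
# get_control_frequency('2W')   -> ([0, 0, 14, 0, 0, 0, 0], 'W')   every 14 days
# get_control_frequency('1M/2') -> ([0, 1, 0, 0, 0, 0, 2], 'M')    twice a month
# an unrecognized string such as '7X' returns (None, <error message>)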
1489
+ #
1490
+ # check if valid data time for given pindex
1491
+ #
1492
+ def valid_data_time(pgrec, cstr = None, logact = 0):
1493
+
1494
+ if pgrec['pindex'] and pgrec['datatime']:
1495
+ (freq, unit) = get_control_frequency(pgrec['frequency'])
1496
+ if not freq:
1497
+ if cstr: PgLOG.pglog("{}: {}".format(cstr, unit), logact)
1498
+ return PgLOG.FAILURE
1499
+
1500
+ dtime = PgUtil.adddatetime(pgrec['datatime'], freq[0], freq[1], freq[2], freq[3], freq[4], freq[5], freq[6])
1501
+ if PgDBI.pgget("dcupdt", "", "cindex = {} AND datatime < '{}'".format(pgrec['pindex'], dtime), PGOPT['extlog']):
1502
+ if cstr: PgLOG.pglog("{}: MUST be processed After Control Index {}".format(cstr, pgrec['pindex']), logact)
1503
+ return PgLOG.FAILURE
1504
+
1505
+ return PgLOG.SUCCESS
1506
+
1507
+ #
1508
+ # publish filelists for given datasets
1509
+ #
1510
+ def publish_dataset_filelist(dsids):
1511
+
1512
+ for dsid in dsids:
1513
+ PgLOG.pgsystem("publish_filelist " + dsid, PGOPT['wrnlog'], 7)
1514
+
1515
+ #
1516
+ # get the current active version index for given dsid
1517
+ #
1518
+ def get_version_index(dsid, logact = 0):
1519
+
1520
+ pgrec = PgDBI.pgget("dsvrsn", "vindex", "dsid = '{}' AND status = 'A'".format(dsid), logact)
1521
+
1522
+ return (pgrec['vindex'] if pgrec else 0)
1523
+
1524
+ #
1525
+ # append given format (data or archive) sfmt to format string sformat
1526
+ #
1527
+ def append_format_string(sformat, sfmt, chkend = 0):
1528
+
1529
+ mp = r'(^|\.){}$' if chkend else r'(^|\.){}(\.|$)'
1530
+ if sfmt:
1531
+ if not sformat:
1532
+ sformat = sfmt
1533
+ else:
1534
+ for fmt in re.split(r'\.', sfmt):
1535
+ if not re.search(mp.format(fmt), sformat, re.I): sformat += '.' + fmt
1536
+
1537
+ return sformat
1538
+
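For example (illustrative calls only; the format names are arbitrary):

# append_format_string('tar', 'GZ')    -> 'tar.GZ'   appended, since 'gz' is absent
# append_format_string('tar.gz', 'gz') -> 'tar.gz'   case-insensitive, no duplicate
# append_format_string(None, 'zip')    -> 'zip'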
1539
+ #
1540
+ # get request type string or shared info
1541
+ #
1542
+ def request_type(rtype, idx = 0):
1543
+
1544
+ RTYPE = {
1545
+ 'C' : ["Customized Data", 0],
1546
+ 'D' : ["CDP Link", 0],
1547
+ 'M' : ["Delayed Mode Data", 1],
1548
+ 'N' : ["NCARDAP(THREDDS) Data Server", 0],
1549
+ 'Q' : ["Database Query", 0],
1550
+ 'R' : ["Realtime Data", 0],
1551
+ 'S' : ["Subset Data", 0],
1552
+ 'T' : ["Subset/Format-Conversion Data", 0],
1553
+ 'F' : ["Format Conversion Data", 1], # web
1554
+ 'A' : ["Archive Format Conversion", 1], # web
1555
+ 'P' : ["Plot Chart", 0],
1556
+ 'U' : ["Data", 0]
1557
+ }
1558
+
1559
+ if rtype not in RTYPE: rtype = 'U'
1560
+
1561
+ return RTYPE[rtype][idx]
1562
+
1563
+ #
1564
+ # email notice for user
1565
+ #
1566
+ def send_request_email_notice(pgrqst, errmsg, fcount, rstat, readyfile = None, pgpart = None):
1567
+
1568
+ pgcntl = PGOPT['RCNTL']
1569
+ rhome = params['WH'] if 'WH' in params and params['WH'] else PgLOG.PGLOG['RQSTHOME']
1570
+ if errmsg:
1571
+ if pgpart:
1572
+ if cache_partition_email_error(pgpart['rindex'], errmsg): return rstat
1573
+ enote = "email_part_error"
1574
+ else:
1575
+ enote = "email_error"
1576
+ elif fcount == 0:
1577
+ if pgcntl and pgcntl['empty_out'] == 'Y':
1578
+ enote = "email_empty"
1579
+ else:
1580
+ errmsg = "NO output data generated"
1581
+ if pgpart:
1582
+ if cache_partition_email_error(pgpart['rindex'], errmsg): return rstat
1583
+ enote = "email_part_error"
1584
+ else:
1585
+ enote = "email_error"
1586
+ elif 'EN' in params and params['EN'][0]:
1587
+ enote = params['EN'][0]
1588
+ elif pgrqst['enotice']:
1589
+ enote = pgrqst['enotice']
1590
+ elif pgcntl and pgcntl['enotice']:
1591
+ enote = pgcntl['enotice']
1592
+ elif pgrqst['globus_transfer'] == 'Y' and pgrqst['task_id']:
1593
+ enote = "email_notice_globus"
1594
+ else:
1595
+ enote = "email_" + ("command" if pgrqst['location'] else "notice")
1596
+
1597
+ if enote[0] not in '/.': enote = "{}/notices/{}".format(rhome, enote)
1598
+
1599
+ finfo = PgFile.check_local_file(enote, 128)
1600
+ if not finfo:
1601
+ if finfo is None:
1602
+ ferror = "file not exists"
1603
+ else:
1604
+ ferror = "Error check file"
1605
+ else:
1606
+ ef = open(enote, 'r') # open email notice file
1607
+ ferror = None
1608
+
1609
+ if ferror:
1610
+ if errmsg:
1611
+ PgLOG.pglog("{}: {}\nCannot email error to {}@ucar.edu: {}".format(enote, ferror, PgLOG.PGLOG['CURUID'], errmsg),
1612
+ (PGOPT['errlog'] if rstat else PGOPT['extlog']))
1613
+ return "E"
1614
+ else:
1615
+ errmsg = PgLOG.pglog("{}: {}\nCannot email notice to {}".format(enote, ferror, pgrqst['email']), PGOPT['errlog']|PgLOG.RETMSG)
1616
+ enote = rhome + "/notices/email_error"
1617
+ ef = open(enote, 'r')
1618
+ rstat = 'E'
1619
+
1620
+ ebuf = ''
1621
+ ebuf += ef.read()
1622
+ ef.close()
1623
+
1624
+ einfo = {}
1625
+ einfo['HOSTNAME'] = PgLOG.PGLOG['HOSTNAME']
1626
+ einfo['DSID'] = pgrqst['dsid']
1627
+ einfo['DSSURL'] = PgLOG.PGLOG['DSSURL']
1628
+ if pgrqst['location']:
1629
+ einfo['WHOME'] = pgrqst['location']
1630
+ else:
1631
+ einfo['WHOME'] = PgLOG.PGLOG['RQSTURL']
1632
+ einfo['SENDER'] = pgrqst['specialist'] + "@ucar.edu"
1633
+ einfo['RECEIVER'] = pgrqst['email']
1634
+ einfo['RTYPE'] = request_type(pgrqst['rqsttype'])
1635
+ PgLOG.add_carbon_copy() # clean carbon copy email in case not empty
1636
+ exclude = (einfo['SENDER'] if errmsg else einfo['RECEIVER'])
1637
+ if not errmsg and pgcntl and pgcntl['ccemail']:
1638
+ PgLOG.add_carbon_copy(pgcntl['ccemail'], 1, exclude, pgrqst['specialist'])
1639
+ if PgLOG.PGLOG['CURUID'] != pgrqst['specialist'] and PgLOG.PGLOG['CURUID'] != PgLOG.PGLOG['RDAUSER']:
1640
+ PgLOG.add_carbon_copy(PgLOG.PGLOG['CURUID'], 1, exclude)
1641
+ if 'CC' in params: PgLOG.add_carbon_copy(params['CC'], 0, exclude)
1642
+ einfo['CCD'] = PgLOG.PGLOG['CCDADDR']
1643
+ einfo['RINDEX'] = str(pgrqst['rindex'])
1644
+ einfo['RQSTID'] = pgrqst['rqstid']
1645
+ pgrec = PgDBI.pgget("dataset", "title", "dsid = '{}'".format(pgrqst['dsid']), PGOPT['extlog'])
1646
+ einfo['DSTITLE'] = pgrec['title'] if pgrec and pgrec['title'] else ''
1647
+ einfo['SUBJECT'] = ''
1648
+ if errmsg:
1649
+ einfo['ERRMSG'] = PgLOG.get_error_command(int(time.time()), PGOPT['errlog']) + errmsg
1650
+ einfo['SUBJECT'] = "Error "
1651
+ if pgpart:
1652
+ einfo['PARTITION'] = " partition"
1653
+ einfo['PTIDX'] = "(PTIDX{})".format(pgpart['pindex'])
1654
+ einfo['SUBJECT'] += "Process Partitions of "
1655
+ else:
1656
+ einfo['PARTITION'] = einfo['PTIDX'] = ''
1657
+ einfo['SUBJECT'] += "Build "
1658
+ einfo['SUBJECT'] += "{} Rqst{} from {}".format(einfo['RTYPE'], pgrqst['rindex'], pgrqst['dsid'])
1659
+ else:
1660
+ if fcount == 0:
1661
+ einfo['SUBJECT'] += "NO Output:"
1662
+ else:
1663
+ einfo['SUBJECT'] += "Completed:"
1664
+ einfo['DAYS'] = str(PGOPT['VP'])
1665
+ pgrec = PgDBI.pgget("dssgrp", "lstname, fstname, phoneno",
1666
+ "logname = '{}'".format(PgLOG.PGLOG['CURUID']), PGOPT['extlog'])
1667
+ if pgrec:
1668
+ einfo['SPECIALIST'] = "{} {}".format(pgrec['fstname'], pgrec['lstname'])
1669
+ einfo['PHONENO'] = pgrec['phoneno']
1670
+ einfo['SUBJECT'] += " {} {} request {} - {}!".format(pgrqst['dsid'], einfo['RTYPE'], pgrqst['rindex'], pgrqst['email'])
1671
+
1672
+ if pgrqst['note']:
1673
+ einfo['RNOTE'] = "\nRequest Detail:\n{}\n".format(pgrqst['note'])
1674
+ elif fcount > 0 and pgrqst['rinfo']:
1675
+ einfo['RNOTE'] = "\nRequest Detail:\n{}\n".format(pgrqst['rinfo'])
1676
+ else:
1677
+ einfo['RNOTE'] = ""
1678
+
1679
+ if pgrqst['globus_transfer'] == 'Y' and pgrqst['task_id']:
1680
+ einfo['GLOBUS_TASK_URL'] = "https://app.globus.org/activity/" + pgrqst['task_id']
1681
+
1682
+ for ekey in einfo:
1683
+ ebuf = re.sub(r'<{}>'.format(ekey), einfo[ekey], ebuf)
1684
+
1685
+ if PgLOG.PGLOG['DSCHECK'] and not pgpart:
1686
+ tbl = "dscheck"
1687
+ cnd = "cindex = {}".format(PgLOG.PGLOG['DSCHECK']['cindex'])
1688
+ else:
1689
+ tbl = "dsrqst"
1690
+ cnd = "rindex = {}".format(pgrqst['rindex'])
1691
+
1692
+ if not PgDBI.cache_customized_email(tbl, "einfo", cnd, ebuf, 0): return 'E'
1693
+ if errmsg:
1694
+ PgLOG.pglog("Error Email {} cached to {}.einfo for {}:\n{}".format(einfo['SENDER'], tbl, cnd, errmsg),
1695
+ PGOPT['errlog'])
1696
+ else:
1697
+ PgLOG.pglog("{}Email {} cached to {}.einfo for {}\nSubset: {}".format(("Customized " if pgrqst['enotice'] else ""), einfo['RECEIVER'], tbl, cnd, einfo['SUBJECT']),
1698
+ PGOPT['wrnlog']|PgLOG.FRCLOG)
1699
+ if readyfile:
1700
+ rf = open(readyfile, 'w')
1701
+ rf.write(ebuf)
1702
+ rf.close()
1703
+ PgFile.set_local_mode(readyfile, 1, PgLOG.PGLOG['FILEMODE'])
1704
+
1705
+ return rstat
1706
+
1707
+ #
1708
+ # cache partition process error to existing email buffer
1709
+ #
1710
+ def cache_partition_email_error(ridx, errmsg):
1711
+
1712
+ pkey = "<PARTERR>"
1713
+ pgrec = PgDBI.pgget("dsrqst", 'einfo', "rindex = {}".format(ridx), PGOPT['extlog'])
1714
+ if not (pgrec and pgrec['einfo'] and pgrec['einfo'].find(pkey) > -1): return 0
1715
+
1716
+ errmsg = PgLOG.get_error_command(int(time.time()), PGOPT['errlog']) + ("{}\n{}".format(errmsg, pkey))
1717
+ pgrec['einfo'] = re.sub(pkey, errmsg, pgrec['einfo'])
1718
+
1719
+ return PgDBI.pgupdt("dsrqst", pgrec, "rindex = {}".format(ridx), PGOPT['extlog'])