rda-python-common 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1740 @@
1
+ #
2
+ ###############################################################################
3
+ #
4
+ # Title : PgOPT.py
5
+ #
6
+ # Author : Zaihua Ji, zji@ucar.edu
7
+ # Date : 08/26/2020
8
+ # 2025-01-10 transferred to package rda_python_common from
9
+ # https://github.com/NCAR/rda-shared-libraries.git
10
+ # Purpose : python library module for holding global variables
11
+ # functions for processing options and other global functions
12
+ #
13
+ # Github : https://github.com/NCAR/rda-python-common.git
14
+ #
15
+ ###############################################################################
16
+ #
17
+ import os
18
+ import sys
19
+ import re
20
+ import time
21
+ from os import path as op
22
+ from . import PgLOG
23
+ from . import PgUtil
24
+ from . import PgFile
25
+ from . import PgDBI
26
+
27
+ OUTPUT = None
28
+ CMDOPTS = {}
29
+ INOPTS = {}
30
+
31
+ # global variables are used by all applications and this package.
32
+ # they need be initialized in application specified packages
33
+ ALIAS = {}
34
+ TBLHASH = {}
35
+
36
+ ###############################################################################
37
+ # valid options the first hash value: 0 means mode option, 1 means single-value
38
+ # option, 2 means multiple-value option, and >=4 means action option the second
39
+ # hash values are long option names, either hash keys (considered as short
40
+ # option names) or the associated long names can be used. All options, except for
41
+ # multi-line value ones, can be specified on command line, while single-value and
42
+ # multi-value options, except option -IM for input files, can also given in input
43
+ # files long value option names are used in output files all letters of option
44
+ # names are case insensitive.
45
+ #
46
+ # The third hash value define bit flags,
47
+ # For Action Options:
48
+ # -1 - VSN card actions
49
+ # >0 - set actions
50
+ #
51
+ # For Mode Options:
52
+ # 1 - mode for archiving actions
53
+ # 2 - mode for set actions
54
+ #
55
+ # For Single-Value Info Options:
56
+ # 1(0x001) - auto set value
57
+ # 2(0x002) - manually set value
58
+ # 16(0x010) - convert to integer from commandline and input files, set to 0 if empty
59
+ # 32(0x020) - time field
60
+ # 128(0x080) - '' allowed for single letter value
61
+ # 256(0x100) - date field
62
+ #
63
+ # For Multi-Value Info Options:
64
+ # 1(0x001) - one for multiple
65
+ # 2(0x002) - auto-set,
66
+ # 4(0x004) - expanded from one
67
+ # 8(0x008) - validated
68
+ # 16(0x010) - convert to integer from commandline and input files, set to 0 if empty
69
+ # 32(0x020) - time field
70
+ # 64(0x040) - text field allowing multiple lines
71
+ # 128(0x080) - '' allowed for single letter value
72
+ # 256(0x100) - date field
73
+ #
74
+ # The fourth hash values define restrictions for single letter values
75
+ ###############################################################################
76
+ OPTS = {}
77
+
78
+ # global initial optional values
79
+ PGOPT = {
80
+ 'ACTS' : 0, # carry current action bits
81
+ 'UACTS' : 0, # carry dsarch skip check UD action bits
82
+ 'CACT' : '', # current short action name
83
+ 'IFCNT' : 0, # 1 to read a single Input File at a time
84
+ 'ANAME' : '', # cache the application name if set
85
+ 'TABLE' : '', # table name the action is on
86
+ 'UID' : 0, # user.uid
87
+ 'MSET' : 'SA', # Action for multiple sets
88
+ 'WIDTH' : 128, # max column width
89
+ 'TXTBIT' : 64, # text field bit (0x1000) allow multiple lines
90
+ 'PEMAX' : 12, # max count of reuqest partition errors for auto reprocesses
91
+ 'PTMAX' : 24, # max number of partitions for a single request
92
+ 'REMAX' : 2, # max count of reuqest errors for auto reprocesses
93
+ 'RSMAX' : 100, # max count of gatherxml with options -R -S
94
+ 'RCNTL' : None, # placehold for a request control record
95
+ 'dcm' : "dcm",
96
+ 'sdp' : "sdp",
97
+ 'rcm' : "rcm",
98
+ 'scm' : "scm",
99
+ 'wpg' : "",
100
+ 'gatherxml' : "gatherxml",
101
+ 'cosconvert' : "cosconvert",
102
+ 'emllog' : PgLOG.LGWNEM,
103
+ 'emlerr' : PgLOG.LOGERR|PgLOG.EMEROL,
104
+ 'emerol' : PgLOG.LOGWRN|PgLOG.EMEROL,
105
+ 'emlsum' : PgLOG.LOGWRN|PgLOG.EMLSUM,
106
+ 'emlsep' : PgLOG.LGWNEM|PgLOG.SEPLIN,
107
+ 'wrnlog' : PgLOG.LOGWRN,
108
+ 'errlog' : PgLOG.LOGERR,
109
+ 'extlog' : PgLOG.LGEREX,
110
+ 'PTYPE' : "CPRV",
111
+ 'WDTYP' : "ADNU",
112
+ 'HFTYP' : "DS",
113
+ 'SDTYP' : "PORWUV",
114
+ 'GXTYP' : "DP"
115
+ }
116
+
117
+ # global default parameters
118
+ params = {
119
+ 'ES' : "<=>",
120
+ 'AO' : "<!>",
121
+ 'DV' : "<:>"
122
+ }
123
+
124
+ WTYPE = {
125
+ 'A' : "ARCO",
126
+ 'D' : "DATA",
127
+ 'N' : "NCAR",
128
+ 'U' : "UNKNOWN",
129
+ }
130
+
131
+ HTYPE = {
132
+ 'D' : "DOCUMENT",
133
+ 'S' : "SOFTWARE",
134
+ 'U' : "UNKNOWN"
135
+ }
136
+
137
+ HPATH = {
138
+ 'D' : "docs",
139
+ 'S' : "software",
140
+ 'U' : "help"
141
+ }
142
+
143
+ MTYPE = {
144
+ 'P' : "PRIMARY",
145
+ 'A' : "ARCHIVING",
146
+ 'V' : "VERSION",
147
+ 'W' : "WORKING",
148
+ 'R' : "ORIGINAL",
149
+ 'B' : "BACKUP",
150
+ 'O' : "OFFSITE",
151
+ 'C' : "CHRONOPOLIS",
152
+ 'U' : "UNKNOWN"
153
+ }
154
+
155
+ STYPE = {
156
+ 'O' : "OFFLINE",
157
+ 'P' : "PRIMARY",
158
+ 'R' : "ORIGINAL",
159
+ 'V' : "VERSION",
160
+ 'W' : "WORKING",
161
+ 'U' : "UNKNOWN"
162
+ }
163
+
164
+ BTYPE = {
165
+ 'B' : "BACKUPONLY",
166
+ 'D' : "BACKDRDATA",
167
+ }
168
+
169
+ #
170
+ # process and parsing input information
171
+ # aname - application name such as 'dsarch', 'dsupdt', and 'dsrqst'
172
+ #
173
+ def parsing_input(aname):
174
+
175
+ PgLOG.PGLOG['LOGFILE'] = aname + ".log"
176
+ PGOPT['ANAME'] = aname
177
+ PgDBI.dssdb_dbname()
178
+ argv = sys.argv[1:]
179
+ if not argv: PgLOG.show_usage(aname)
180
+
181
+ PgLOG.cmdlog("{} {}".format(aname, ' '.join(argv)))
182
+
183
+ # process command line options to fill option values
184
+ option = infile = None
185
+ needhelp = 0
186
+ helpopts = {}
187
+ for param in argv:
188
+ if re.match(r'^(-{0,2}help|-H)$', param, re.I):
189
+ if option: helpopts[option] = OPTS[option]
190
+ needhelp = 1
191
+ continue
192
+
193
+ ms = re.match(r'^-([a-zA-Z]\w*)$', param)
194
+ if ms: # option parameter
195
+ param = ms.group(1)
196
+ if option and not needhelp and option not in params:
197
+ val = get_default_info(option)
198
+ if val is not None:
199
+ set_option_value(option, val)
200
+ else:
201
+ parameter_error("-" + option, "missval")
202
+ option = get_option_key(param)
203
+ if needhelp:
204
+ helpopts[option] = OPTS[option]
205
+ break
206
+
207
+ # set mode/action options
208
+ if OPTS[option][0]&3 == 0: set_option_value(option)
209
+
210
+ elif option:
211
+ ms =re.match(r"^\'(.*)\'$", param)
212
+ if ms: param = ms.group(1)
213
+ set_option_value(option, param)
214
+
215
+ elif PgUtil.find_dataset_id(param):
216
+ set_option_value('DS', param)
217
+
218
+ else:
219
+ option = get_option_key(param, 3, 1)
220
+ if option:
221
+ set_option_value(option)
222
+ if needhelp:
223
+ helpopts[option] = OPTS[option]
224
+ break
225
+ elif op.exists(param): # assume input file
226
+ infile = param
227
+ else:
228
+ parameter_error(param)
229
+
230
+ if needhelp: PgLOG.show_usage(aname, helpopts)
231
+
232
+ if option and option not in params:
233
+ val = get_default_info(option)
234
+ if val is not None:
235
+ set_option_value(option, val)
236
+ else:
237
+ parameter_error("-" + option, "missval")
238
+
239
+ # check if only an input filename is given on command line following aname
240
+ if infile:
241
+ if 'IF' in params:
242
+ parameter_error(infile)
243
+ else:
244
+ params['IF'] = [infile]
245
+
246
+ # process given one or multiple input files to fill option values
247
+ if 'IF' in params:
248
+ PGOPT['IFCNT'] = 1 if PGOPT['CACT'] == 'AQ' else 0
249
+ if OPTS['DS'][0] == 1:
250
+ param = validate_infile_names(params['DS']) if 'DS' in params else 0
251
+ else:
252
+ param = 1
253
+ get_input_info(params['IF'])
254
+ if not param and 'DS' in params: validate_infile_names(params['DS'])
255
+
256
+ if not PGOPT['ACTS']: parameter_error(aname, "missact") # no action enter
257
+
258
+ if 'DB' in params:
259
+ dcnt = len(params['DB'])
260
+ for i in range(dcnt):
261
+ if i == 0:
262
+ PgLOG.PGLOG['DBGLEVEL'] = params['DB'][0]
263
+ elif i == 1:
264
+ PgLOG.PGLOG['DBGPATH'] = params['DB'][1]
265
+ elif i == 2:
266
+ PgLOG.PGLOG['DBGFILE'] = params['DB'][2]
267
+ PgLOG.pgdbg(PgLOG.PGLOG['DBGLEVEL'])
268
+
269
+ if 'GZ' in params: PgLOG.PGLOG['GMTZ'] = PgUtil.diffgmthour()
270
+ if 'BG' in params: PgLOG.PGLOG['BCKGRND'] = 1
271
+
272
+ #
273
+ # check and get default value for info option, return None if not available
274
+ #
275
+ def get_default_info(opt):
276
+
277
+ olist = OPTS[opt]
278
+ if olist[0]&3 and len(olist) > 3:
279
+ odval = olist[3]
280
+ if not odval or isinstance(odval, int):
281
+ return odval
282
+ else:
283
+ return odval[0] # return the first char of a default string
284
+
285
+ return None
286
+
287
+ #
288
+ # set output file name handler now
289
+ #
290
+ def open_output(outfile = None):
291
+
292
+ global OUTPUT
293
+
294
+ if outfile: # result output file
295
+ try:
296
+ OUTPUT = open(outfile, 'w')
297
+ except Exception as e:
298
+ PgLOG.pglog("{}: Error open file to write - {}".format(outfile, str(e)), PGOPT['extlog'])
299
+ else: # result to STDOUT
300
+ OUTPUT = sys.stdout
301
+
302
+ #
303
+ # return 1 if valid infile names; sys.exit(1) otherwise
304
+ #
305
+ def validate_infile_names(dsid):
306
+
307
+ i = 0
308
+ for infile in params['IF']:
309
+ if not validate_one_infile(infile, dsid): return PgLOG.FAILURE
310
+ i += 1
311
+ if PGOPT['IFCNT'] and i >= PGOPT['IFCNT']: break
312
+
313
+ return i
314
+
315
+ #
316
+ # validate an input filename against dsid
317
+ #
318
+ def validate_one_infile(infile, dsid):
319
+
320
+ ndsid = PgUtil.find_dataset_id(infile)
321
+ if ndsid == None:
322
+ return PgLOG.pglog("{}: No dsid identified in Input file name {}!".format(dsid, infile), PGOPT['extlog'])
323
+
324
+ fdsid = PgUtil.format_dataset_id(ndsid)
325
+ if fdsid != dsid:
326
+ return PgLOG.pglog("{}: Different dsid {} found in Input file name {}!".format(dsid, fdsid, infile), PGOPT['extlog'])
327
+
328
+ return PgLOG.SUCCESS
329
+
330
+ #
331
+ # gather input information from input files
332
+ #
333
+ def get_input_info(infiles, table = None):
334
+
335
+ i = 0
336
+ for file in infiles:
337
+ i += process_infile(file, table)
338
+ if not PGOPT['IFCNT'] and PGOPT['CACT'] == 'AQ': PGOPT['IFCNT'] = 1
339
+ if PGOPT['IFCNT']: break
340
+
341
+ return i
342
+
343
+ #
344
+ # validate and get info from a single input file
345
+ #
346
+ def read_one_infile(infile):
347
+
348
+ dsid = params['DS']
349
+ del params['DS']
350
+ if OPTS['DS'][2]&2: OPTS['DS'][2] &= ~2
351
+ if 'DS' in CMDOPTS: del CMDOPTS['DS']
352
+ clean_input_values()
353
+ process_infile(infile)
354
+ if 'DS' in params: dsid = params['DS']
355
+ if dsid: validate_one_infile(infile, dsid)
356
+
357
+ return dsid
358
+
359
+ #
360
+ # gather input option values from one input file
361
+ #
362
+ # return 0 if nothing retrieved and table is not null
363
+ #
364
+ def process_infile(infile, table = None):
365
+
366
+ if not op.exists(infile): PgLOG.pglog(infile + ": Input file not exists", PGOPT['extlog'])
367
+ if table:
368
+ PgLOG.pglog("Gather '{}' information from input file '{}'..." .format(table, infile), PGOPT['wrnlog'])
369
+ else:
370
+ PgLOG.pglog("Gather information from input file '{}'...".format(infile), PGOPT['wrnlog'])
371
+
372
+ try:
373
+ fd = open(infile, 'r')
374
+ except Exception as e:
375
+ PgLOG.pglog("{}: Error Open input file - {}!".format(infile, str(e)), PGOPT['extlog'])
376
+ else:
377
+ lines = fd.readlines()
378
+ fd.close()
379
+
380
+ opt = None
381
+ columns = []
382
+ chktbl = 1 if table else -1
383
+ mpes = r'^(\w+)\s*{}\s*(.*)$'.format(params['ES'])
384
+ mpao = r'^(\w+)\s*{}'.format(params['AO'])
385
+ # column count, column index, value count, value index, line index, option-set count, end divider flag
386
+ colcnt = colidx = valcnt = validx = linidx = setcnt = enddiv = 0
387
+ for line in lines:
388
+ linidx += 1
389
+ if linidx%50000 == 0:
390
+ PgLOG.pglog("{}: {} lines read".format(infile, linidx), PGOPT['wrnlog'])
391
+ if 'NT' not in params: line = PgLOG.pgtrim(line, 2)
392
+ if not line:
393
+ if opt: set_option_value(opt, '', 1, linidx, line, infile)
394
+ continue # skip empty lines
395
+ if chktbl > 0:
396
+ if re.match(r'^\[{}\]$'.format(table), line, re.I): # found entry for table
397
+ chktbl = 0
398
+ clean_input_values() # clean previously saved input values
399
+ continue
400
+ else:
401
+ ms = re.match(r'^\[(\w+)\]$', line)
402
+ if ms:
403
+ if chktbl == 0: break # stop at next sub-title
404
+ if not PGOPT['MSET']:
405
+ input_error(linidx, line, infile, ms.group(1) + ": Cannt process sub-title")
406
+ elif PGOPT['CACT'] != PGOPT['MSET']:
407
+ input_error(linidx, line, infile, "Use Action -{} to Set multiple sub-titles".format(PGOPT['MSET']))
408
+ break # stop getting info if no table given or a different table
409
+
410
+ if colcnt == 0: # check single value and action lines first
411
+ ms = re.match(mpes, line)
412
+ if ms: # one value assignment
413
+ key = ms.group(1).strip()
414
+ val = ms.group(2)
415
+ if val and 'NT' not in params: val = val.strip()
416
+ opt = get_option_key(key, 1, 0, linidx, line, infile, table)
417
+ set_option_value(opt, val, 0, linidx, line, infile)
418
+ if not OPTS[opt][2]&PGOPT['TXTBIT']: opt = None
419
+ setcnt += 1
420
+ continue
421
+
422
+ ms = re.match(mpao, line)
423
+ if ms: # set mode or action option
424
+ key = get_option_key(ms.group(1).strip(), 4, 0, linidx, line, infile, table)
425
+ set_option_value(key, '', 0, linidx, line, infile)
426
+ setcnt += 1
427
+ continue
428
+
429
+ # check mutiple value assignment for one or more multi-value options
430
+ values = line.split(params['DV'])
431
+ valcnt = len(values)
432
+ if colcnt == 0:
433
+ while colcnt < valcnt:
434
+ key = values[colcnt].strip()
435
+ if not key: break
436
+ opt = get_option_key(key, 2, 1, linidx, line, infile, table)
437
+ if not opt: break
438
+ columns.append(opt)
439
+ if opt in params: del params[opt]
440
+ colcnt += 1
441
+ if colcnt < valcnt:
442
+ if colcnt == (valcnt-1):
443
+ enddiv = 1
444
+ else:
445
+ input_error(linidx, line, infile, "Multi-value Option Name missed for column {}".format(colcnt+1))
446
+ opt = None
447
+ continue
448
+
449
+ elif valcnt == 1:
450
+ if re.match(mpes, line):
451
+ input_error(linidx, line, infile, "Cannot set single value option after Multi-value Options")
452
+ elif re.match(mpao, line):
453
+ input_error(linidx, line, infile, "Cannot set acttion/mode option after Multi-value Options")
454
+
455
+ if opt: # add to multipe line value
456
+ val = values.pop(0)
457
+ valcnt -= 1
458
+ if val and 'NT' not in params: val = val.strip()
459
+ set_option_value(opt, val, 1, linidx, line, infile)
460
+ setcnt += 1
461
+ if valcnt == 0: continue # continue to check multiple line value
462
+ colidx += 1
463
+ opt = None
464
+
465
+ reduced = 0
466
+ valcnt += colidx
467
+ if valcnt > colcnt:
468
+ if enddiv:
469
+ val = values.pop()
470
+ if not val.strip():
471
+ valcnt -= 1
472
+ reduced = 1
473
+ if valcnt > colcnt:
474
+ input_error(linidx, line, infile, "Too many values({}) provided for {} columns".format(valcnt+colidx, colcnt))
475
+
476
+ if values:
477
+ for val in values:
478
+ opt = columns[colidx]
479
+ colidx += 1
480
+ if val and 'NT' not in params: val = val.strip()
481
+ set_option_value(opt, val, 0, linidx, line, infile)
482
+ setcnt += 1
483
+ colidx += (reduced-enddiv)
484
+
485
+ if colidx == colcnt:
486
+ colidx = 0 # done with gathering values of a multi-value line
487
+ opt = None
488
+ elif opt and not OPTS[opt][2]&PGOPT['TXTBIT']:
489
+ colidx += 1
490
+ opt = None
491
+
492
+ if setcnt > 0:
493
+ if colidx:
494
+ if colidx < colcnt:
495
+ input_error(linidx, '', infile, "{} of {} values missed".format(colcnt-colidx, colcnt))
496
+ elif enddiv:
497
+ input_error(linidx, '', infile, "Miss end divider '{}'".format(params['DV']))
498
+ return 1 # read something
499
+ else:
500
+ if table: PgLOG.pglog("No option information found for '{}'".format(table), PgLOG.WARNLG)
501
+ return 0 # read nothing
502
+
503
+ #
504
+ # clean params for input option values when set mutiple tables
505
+ #
506
+ def clean_input_values():
507
+
508
+ global INOPTS
509
+ # clean previously saved input values if any
510
+ for opt in INOPTS:
511
+ del params[opt]
512
+ INOPTS = {}
513
+
514
+ #
515
+ # build a hash record for add or update of a table record
516
+ #
517
+ def build_record(flds, pgrec, tname, idx = 0):
518
+
519
+ record = {}
520
+ if not flds: return record
521
+
522
+ hash = TBLHASH[tname]
523
+
524
+ for key in flds:
525
+ if key not in hash: continue
526
+ opt = hash[key][0]
527
+ field = hash[key][3] if len(hash[key]) == 4 else hash[key][1]
528
+ ms = re.search(r'\.(.+)$', field)
529
+ if ms: field = ms.group(1)
530
+ if opt in params:
531
+ if OPTS[opt][0] == 1:
532
+ val = params[opt]
533
+ else:
534
+ if OPTS[opt][2]&2 and pgrec and field in pgrec and pgrec[field]: continue
535
+ val = params[opt][idx]
536
+ sval = pgrec[field] if pgrec and field in pgrec else None
537
+ if sval is None:
538
+ if val == '': val = None
539
+ elif isinstance(sval, int):
540
+ if isinstance(val, str): val = (int(val) if val else None) # change '' to None for int
541
+ if PgUtil.pgcmp(sval, val, 1): record[field] = val # record new or changed value
542
+
543
+ return record
544
+
545
+ #
546
+ # set global variable PGOPT['UID'] with value of user.uid, fatal if unsuccessful
547
+ #
548
+ def set_uid(aname):
549
+
550
+ set_email_logact()
551
+
552
+ if 'LN' not in params:
553
+ params['LN'] = PgLOG.PGLOG['CURUID']
554
+ elif params['LN'] != PgLOG.PGLOG['CURUID']:
555
+ params['MD'] = 1 # make sure this set if running as another user
556
+ if 'NE' not in params: PgLOG.PGLOG['EMLADDR'] = params['LN']
557
+ if 'DM' in params and re.match(r'^(start|begin)$', params['DM'], re.I):
558
+ msg = "'{}' must start Daemon '{} -{}' as '{}'".format(PgLOG.PGLOG['CURUID'], aname, PGOPT['CACT'], params['LN'])
559
+ else:
560
+ msg = "'{}' runs '{} -{}' as '{}'!".format(PgLOG.PGLOG['CURUID'], aname, PGOPT['CACT'], params['LN'])
561
+ PgLOG.pglog(msg, PGOPT['wrnlog'])
562
+ PgLOG.set_specialist_environments(params['LN'])
563
+
564
+ if 'LN' not in params: PgLOG.pglog("Could not get user login name", PGOPT['extlog'])
565
+
566
+ validate_dataset()
567
+ if OPTS[PGOPT['CACT']][2] > 0: validate_dsowner(aname)
568
+
569
+ pgrec = PgDBI.pgget("dssdb.user", "uid", "logname = '{}' AND until_date IS NULL".format(params['LN']), PGOPT['extlog'])
570
+ if not pgrec: PgLOG.pglog("Could not get user.uid for " + params['LN'], PGOPT['extlog'])
571
+ PGOPT['UID'] = pgrec['uid']
572
+
573
+ open_output(params['OF'] if 'OF' in params else None)
574
+
575
+ #
576
+ # set global variable PGOPT['UID'] as 0 for a sudo user
577
+ #
578
+ def set_sudo_uid(aname, uid):
579
+
580
+ set_email_logact()
581
+
582
+ if PgLOG.PGLOG['CURUID'] != uid:
583
+ if 'DM' in params and re.match(r'^(start|begin)$', params['DM'], re.I):
584
+ msg = "'{}': must start Daemon '{} -{} as '{}'".format(PgLOG.PGLOG['CURUID'], aname, params['CACT'], uid)
585
+ else:
586
+ msg = "'{}': must run '{} -{}' as '{}'".format(PgLOG.PGLOG['CURUID'], aname, params['CACT'], uid)
587
+ PgLOG.pglog(msg, PGOPT['extlog'])
588
+
589
+ PGOPT['UID'] = 0
590
+ params['LN'] = PgLOG.PGLOG['CURUID']
591
+
592
+ #
593
+ # set global variable PGOPT['UID'] as 0 for root user
594
+ #
595
+ def set_root_uid(aname):
596
+
597
+ set_email_logact()
598
+
599
+ if PgLOG.PGLOG['CURUID'] != "root":
600
+ if 'DM' in params and re.match(r'^(start|begin)$', params['DM'], re.I):
601
+ msg = "'{}': you must start Daemon '{} -{} as 'root'".format(PgLOG.PGLOG['CURUID'], aname, params['CACT'])
602
+ else:
603
+ msg = "'{}': you must run '{} -{}' as 'root'".format(PgLOG.PGLOG['CURUID'], aname, params['CACT'])
604
+ PgLOG.pglog(msg, PGOPT['extlog'])
605
+
606
+ PGOPT['UID'] = 0
607
+ params['LN'] = PgLOG.PGLOG['CURUID']
608
+
609
+ #
610
+ # set email logging bits
611
+ #
612
+ def set_email_logact():
613
+
614
+ if 'NE' in params:
615
+ PgLOG.PGLOG['LOGMASK'] &= ~PgLOG.EMLALL # remove all email bits
616
+ elif 'SE' in params:
617
+ PgLOG.PGLOG['LOGMASK'] &= ~PgLOG.EMLLOG # no normal email
618
+
619
+ #
620
+ # validate dataset owner
621
+ #
622
+ # return: 0 or fatal if not valid, 1 if valid, -1 if can not be validated
623
+ #
624
+ def validate_dsowner(aname, dsid = None, logname = None, pgds = 0, logact = 0):
625
+
626
+ if not logname: logname = (params['LN'] if 'LN' in params else PgLOG.PGLOG['CURUID'])
627
+ if logname == PgLOG.PGLOG['GDEXUSER']: return 1
628
+
629
+ dsids = {}
630
+ if dsid:
631
+ dsids[dsid] = 1
632
+ elif 'DS' in params:
633
+ if OPTS['DS'][0] == 2:
634
+ for dsid in params['DS']:
635
+ dsids[dsid] = 1
636
+ else:
637
+ dsids[params['DS']] = 1
638
+ else:
639
+ return -1
640
+
641
+ if not pgds and 'MD' in params: pgds = 1
642
+ if not logact: logact = PGOPT['extlog']
643
+
644
+ for dsid in dsids:
645
+ if not PgDBI.pgget("dsowner", "", "dsid = '{}' AND specialist = '{}'".format(dsid, logname), PGOPT['extlog']):
646
+ if not PgDBI.pgget("dssgrp", "", "logname = '{}'".format(logname), PGOPT['extlog']):
647
+ return PgLOG.pglog("'{}' is not DSS Specialist!".format(logname), logact)
648
+ elif not pgds:
649
+ return PgLOG.pglog("'{}' not listed as Specialist of '{}'\nRun '{}' with Option -MD!".format(logname, dsid, aname), logact)
650
+
651
+ return 1
652
+
653
+ #
654
+ # validate dataset
655
+ #
656
+ def validate_dataset():
657
+
658
+ cnt = 1
659
+ if 'DS' in params:
660
+ if OPTS['DS'][0] == 2:
661
+ for dsid in params['DS']:
662
+ cnt = PgDBI.pgget("dataset", "", "dsid = '{}'".format(dsid), PGOPT['extlog'])
663
+ if cnt == 0: break
664
+ else:
665
+ dsid = params['DS']
666
+ cnt = PgDBI.pgget("dataset", "", "dsid = '{}'".format(dsid), PGOPT['extlog'])
667
+
668
+ if not cnt: PgLOG.pglog(dsid + " not exists in RDADB!", PGOPT['extlog'])
669
+
670
+ #
671
+ # validate given group indices or group names
672
+ #
673
+ def validate_groups(parent = 0):
674
+
675
+ if parent:
676
+ gi = 'PI'
677
+ gn = 'PN'
678
+ else:
679
+ gi = 'GI'
680
+ gn = 'GN'
681
+ if (OPTS[gi][2]&8): return # already validated
682
+
683
+ dcnd = "dsid = '{}'".format(params['DS'])
684
+ if gi in params:
685
+ grpcnt = len(params[gi])
686
+ i = 0
687
+ while i < grpcnt:
688
+ gidx = params[gi][i]
689
+ if not isinstance(gidx, int) and re.match(r'^(!|<|>|<>)$', gidx): break
690
+ i += 1
691
+ if i >= grpcnt: # normal group index given
692
+ for i in range(grpcnt):
693
+ gidx = params[gi][i]
694
+ gidx = int(gidx) if gidx else 0
695
+ params[gi][i] = gidx
696
+ if gidx == 0 or (i > 0 and gidx == params[gi][i-1]): continue
697
+ if not PgDBI.pgget("dsgroup", '', "{} AND gindex = {}".format(dcnd, gidx), PGOPT['extlog']):
698
+ if i > 0 and parent and params['GI']:
699
+ j = 0
700
+ while j < i:
701
+ if gidx == params['GI'][j]: break
702
+ j += 1
703
+ if j < i: continue
704
+ PgLOG.pglog("Group Index {} not in RDADB for {}".format(gidx, params['DS']), PGOPT['extlog'])
705
+ else: # found none-equal condition sign
706
+ pgrec = PgDBI.pgmget("dsgroup", "DISTINCT gindex", dcnd + PgDBI.get_field_condition("gindex", params[gi]), PGOPT['extlog'])
707
+ grpcnt = (len(pgrec['gindex']) if pgrec else 0)
708
+ if grpcnt == 0:
709
+ PgLOG.pglog("No Group matches given Group Index condition for " + params['DS'], PGOPT['extlog'])
710
+
711
+ params[gi] = pgrec['gindex']
712
+ elif gn in params:
713
+ params[gi] = group_id_to_index(params[gn])
714
+
715
+ OPTS[gi][2] |= 8 # set validated flag
716
+
717
+ #
718
+ # get group index array from given group IDs
719
+ #
720
+ def group_id_to_index(grpids):
721
+
722
+ count = len(grpids) if grpids else 0
723
+ if count == 0: return None
724
+
725
+ indices = []
726
+ dcnd = "dsid = '{}'".format(params['DS'])
727
+ i = 0
728
+ while i < count:
729
+ gid = grpids[i]
730
+ if gid and (re.match(r'^(!|<|>|<>)$', gid) or gid.find('%') > -1): break
731
+ i += 1
732
+ if i >= count: # normal group id given
733
+ for i in range(count):
734
+ gid = grpids[i]
735
+ if not gid:
736
+ indices.append(0)
737
+ elif i and gid == grpids[i-1]:
738
+ indices.append(indices[i-1])
739
+ else:
740
+ pgrec = PgDBI.pgget("dsgroup", "gindex", "{} AND grpid = '{}'".format(dcnd, gid), PGOPT['extlog'])
741
+ if not pgrec: PgLOG.pglog("Group ID {} not in RDADB for {}".format(gid, params['DS']), PGOPT['extlog'])
742
+ indices.append(pgrec['gindex'])
743
+ return indices
744
+ else: # found wildcard and/or none-equal condition sign
745
+ pgrec = PgDBI.pgmget("dsgroup", "DISTINCT gindex", dcnd + PgDBI.get_field_condition("grpid", grpids, 1), PGOPT['extlog'])
746
+ count = (len(pgrec['gindex']) if pgrec else 0)
747
+ if count == 0: PgLOG.pglog("No Group matches given Group ID condition for " + params['DS'], PGOPT['extlog'])
748
+ return pgrec['gindex']
749
+
750
+ #
751
+ # get group ID array from given group indices
752
+ #
753
+ def group_index_to_id(indices):
754
+
755
+ count = len(indices) if indices else 0
756
+ if count == 0: return None
757
+
758
+ grpids = []
759
+ dcnd = "dsid = '{}'".format(params['DS'])
760
+ i = 0
761
+ while i < count:
762
+ gidx = indices[i]
763
+ if not isinstance(gidx, int) and re.match(r'^(!|<|>|<>)$', gidx): break
764
+ i += 1
765
+ if i >= count: # normal group index given
766
+ for i in range(count):
767
+ gidx = indices[i]
768
+ if not gidx:
769
+ grpids.append('') # default value
770
+ elif i and gidx == indices[i-1]:
771
+ grpids.append(grpids[i-1])
772
+ else:
773
+ pgrec = PgDBI.pgget("dsgroup", "grpid", "{} AND gindex = {}".format(dcnd, gidx), PGOPT['extlog'])
774
+ if not pgrec: PgLOG.pglog("Group Index {} not in RDADB for {}".format(gidx, params['DS']), PGOPT['extlog'])
775
+ grpids.append(pgrec['grpid'])
776
+ return grpids
777
+ else: # found none-equal condition sign
778
+ pgrec = PgDBI.pgmget("dsgroup", "DISTINCT grpid", dcnd + PgDBI.get_field_condition("gindex", indices), PGOPT['extlog'])
779
+ count = (len(pgrec['grpid']) if pgrec else 0)
780
+ if count == 0: PgLOG.pglog("No Group matches given Group Index condition for " + params['DS'], PGOPT['extlog'])
781
+ return pgrec['grpid']
782
+
783
+ #
784
+ # validate order fields and
785
+ # get an array of order fields that are not in given fields
786
+ #
787
+ def append_order_fields(oflds, flds, tname, excludes = None):
788
+
789
+ orders = ''
790
+ hash = TBLHASH[tname]
791
+ for ofld in oflds:
792
+ ufld = ofld.upper()
793
+ if ufld not in hash or excludes and excludes.find(ufld) > -1: continue
794
+ if flds and flds.find(ufld) > -1: continue
795
+ orders += ofld
796
+
797
+ return orders
798
+
799
+ #
800
+ # validate multiple values for given fields
801
+ #
802
+ def validate_multiple_values(tname, count, flds = None):
803
+
804
+ opts = []
805
+ hash = TBLHASH[tname]
806
+ if flds:
807
+ for fld in flds:
808
+ if fld in hash: opts.append(hash[fld][0])
809
+ else:
810
+ for fld in hash:
811
+ opts.append(hash[fld][0])
812
+
813
+ validate_multiple_options(count, opts, (1 if tname == 'htarfile' else 0))
814
+
815
+ #
816
+ # validate multiple values for given options
817
+ #
818
+ def validate_multiple_options(count, opts, remove = 0):
819
+
820
+ for opt in opts:
821
+ if opt not in params or OPTS[opt][0] != 2: continue # no value given or not multiple value option
822
+ cnt = len(params[opt])
823
+ if cnt == 1 and count > 1 and OPTS[opt][2]&1:
824
+ val0 = params[opt][0]
825
+ params[opt] = [val0]*count
826
+ OPTS[opt][2] |= 4 # expanded
827
+ cnt = count
828
+ if cnt != count:
829
+ if count == 1 and cnt > 1 and OPTS[opt][2]&PGOPT['TXTBIT']:
830
+ params[opt][0] = ' '.join(params[opt])
831
+ elif remove and cnt == 1 and count > 1:
832
+ del params[opt]
833
+ elif cnt < count:
834
+ PgLOG.pglog("Multi-value Option {}({}): {} Given and {} needed".format(opt, OPTS[opt][1], cnt, count), PGOPT['extlog'])
835
+
836
+ #
837
+ # get field keys for a RDADB table, include all if !include
838
+ #
839
+ def get_field_keys(tname, include = None, exclude = None):
840
+
841
+ fields = ''
842
+ hash = TBLHASH[tname]
843
+
844
+ for fld in hash:
845
+ if include and include.find(fld) < 0: continue
846
+ if exclude and exclude.find(fld) > -1: continue
847
+ opt = hash[fld][0]
848
+ if opt in params: fields += fld
849
+
850
+ return fields if fields else None
851
+
852
+ #
853
+ # get a string for fields of a RDADB table
854
+ #
855
+ def get_string_fields(flds, tname, include = None, exclude = None):
856
+
857
+ fields = []
858
+ hash = TBLHASH[tname]
859
+
860
+ for fld in flds:
861
+ ufld = fld.upper() # in case
862
+ if include and include.find(ufld) < 0: continue
863
+ if exclude and exclude.find(ufld) > -1: continue
864
+ if ufld not in hash:
865
+ PgLOG.pglog("Invalid field '{}' to get from '{}'".format(fld, tname), PGOPT['extlog'])
866
+ elif hash[ufld][0] not in OPTS:
867
+ PgLOG.pglog("Option '{}' is not defined for field '{} - {}'".format(hash[ufld][0], ufld, hash[ufld][1]), PGOPT['extlog'])
868
+ if len(hash[ufld]) == 4:
869
+ fname = "{} {}".format(hash[ufld][3], hash[ufld][1])
870
+ else:
871
+ fname = hash[ufld][1]
872
+ fields.append(fname)
873
+
874
+ return ', '.join(fields)
875
+
876
+ #
877
+ # get max count for given options
878
+ #
879
+ def get_max_count(opts):
880
+
881
+ count = 0
882
+ for opt in opts:
883
+ if opt not in params: continue
884
+ cnt = len(params[opt])
885
+ if cnt > count: count = cnt
886
+
887
+ return count
888
+
889
+ #
890
+ # get a string of fields of a RDADB table for sorting
891
+ #
892
+ def get_order_string(flds, tname, exclude = None):
893
+
894
+ orders = []
895
+ hash = TBLHASH[tname]
896
+
897
+ for fld in flds:
898
+ if fld.islower():
899
+ desc = " DESC"
900
+ fld = fld.upper()
901
+ else:
902
+ desc = ""
903
+ if exclude and exclude.find(fld) > -1: continue
904
+ orders.append(hash[fld][1] + desc)
905
+
906
+ return (" ORDER BY " + ', '.join(orders)) if orders else ''
907
+
908
+ #
909
+ # get a string for column titles of a given table
910
+ #
911
+ def get_string_titles(flds, hash, lens):
912
+
913
+ titles = []
914
+ colcnt = len(flds)
915
+ for i in range(colcnt):
916
+ fld = flds[i]
917
+ if fld not in hash: continue
918
+ opt = hash[fld][0]
919
+ if opt not in OPTS: PgLOG.pglog("ERROR: Undefined option " + opt, PGOPT['extlog'])
920
+ title = OPTS[opt][1]
921
+ if lens:
922
+ if len(title) > lens[i]: title = opt
923
+ title = "{:{}}".format(title, lens[i])
924
+ titles.append(title)
925
+
926
+ return params['DV'].join(titles) + params['DV']
927
+
928
#
# display error message and exit
#
def parameter_error(p, opt = None, lidx = 0, line = 0, infile = None):

    # fixed diagnostics keyed by pseudo-option keywords
    FIXED = {
        'continue' : "error processing input file on continue Line",
        'mixed' : "single-value option mixed with multi-value option",
        'missact' : "No Action Option is specified",
        'missval' : "No value provided following Info Option",
        'duplicate' : "multiple actions not allowed",
        'delayed' : "delayed Mode option not supported"
    }

    if not opt:
        errmsg = "value passed in without leading info option"
    elif opt in FIXED:
        errmsg = FIXED[opt]
    elif opt == 'specified':
        errmsg = "option -{}/-{} is specified already".format(p, OPTS[p][1])
    elif OPTS[opt][0] == 0:
        errmsg = "value follows Mode Option -{}/-{}".format(opt, OPTS[opt][1])
    elif OPTS[opt][0] == 1:
        errmsg = "multiple values follow single-value Option -{}/-{}".format(opt, OPTS[opt][1])
    elif OPTS[opt][0] >= 4:
        errmsg = "value follows Action Option -{}/-{}".format(opt, OPTS[opt][1])
    else:
        errmsg = None

    if errmsg:
        if lidx:
            input_error(lidx, line, infile, "{} - {}".format(p, errmsg))
        else:
            PgLOG.pglog("ERROR: {} - {}".format(p, errmsg), PGOPT['extlog'])
964
#
# wrap function to PgLOG.pglog() for error in input files
#
def input_error(lidx, line, infile, errmsg):

    # report the file name, line index and offending line with the message
    PgLOG.pglog(f"ERROR at {infile}({lidx}): {line}\n {errmsg}", PGOPT['extlog'])
971
#
# wrap function to PgLOG.pglog() for error for action
#
def action_error(errmsg, cact = None):

    parts = ["ERROR"]
    if PGOPT['ANAME']: parts.append(" " + PGOPT['ANAME'])
    if not cact: cact = PGOPT['CACT']
    if cact: parts.append(" for Action {} ({})".format(cact, OPTS[cact][1]))

    # append the dataset id when exactly one is in play
    if 'DS' in params:
        dsflag = OPTS['DS'][0]
        if dsflag == 1:
            parts.append(" of " + params['DS'])
        elif dsflag == 2 and len(params['DS']) == 1:
            parts.append(" of " + params['DS'][0])

    parts.append(": " + errmsg)
    msg = ''.join(parts)
    # record the failure on the active dscheck record, if any, before logging
    if PgLOG.PGLOG['DSCHECK']: PgDBI.record_dscheck_error(msg, PGOPT['extlog'])
    PgLOG.pglog(msg, PGOPT['extlog'])
991
#
# get the valid option for given parameter by checking if the given option
# name matches either an valid option key (short name) or its long name
# flag: 1 - value key only, 2 - multi-value key only, 3 - action key only,
#       4 - mode&action key only
#
def get_option_key(p, flag = 0, skip = 0, lidx = 0, line = None, infile = None, table = None):

    # Resolve option name p to its short key; validate the option category
    # against flag and log an error when it does not match. Returns the
    # short option key, or None when p is unknown.
    # skip: when set, an unknown name is tolerated silently
    # lidx/line/infile: input-file context for error reporting (lidx truthy
    #                   means we are parsing an input file, not the command line)
    # table: when set, record the option in INOPTS for input-file processing
    if p is None: p = ''
    opt = get_short_option(p)
    errmsg = None
    if opt:
        # OPTS[opt][0] encoding: 0 mode, 1 single-value, 2 multi-value, >=4 action
        if flag == 1:
            if OPTS[opt][0]&3 == 0: errmsg = "NOT a Value Option"
        elif flag == 2:
            if OPTS[opt][0]&2 == 0: errmsg = "NOT a Multi-Value Option"
        elif flag == 3:
            if OPTS[opt][0] < 4:
                if lidx:
                    errmsg = "NOT an Action Option"
                else:
                    # on command line a non-action word without '-' is a syntax slip
                    errmsg = "Miss leading '-' for none action option"
        elif flag == 4:
            if OPTS[opt][0]&3:
                errmsg = "NOT a Mode/Action Option"
        if errmsg: errmsg = "{}({}) - {}".format(opt, OPTS[opt][1], errmsg)
    elif not skip:
        if p:
            errmsg = "-{} - Unknown Option".format(p)
        else:
            errmsg = "'' - Empty Option Name"

    if errmsg:
        if lidx:
            input_error(lidx, line, infile, errmsg)
        else:
            PgLOG.pglog("ERROR: " + errmsg, PGOPT['extlog'])
    elif opt and (table or PGOPT['IFCNT'] and OPTS[opt][0] == 2):
        # remember multi-value (or table-context) options seen while reading input files
        INOPTS[opt] = 1

    return opt
1033
#
# set values to given options, ignore options set in input files if the options
# already set on command line
#
# opt: short option key (must exist in OPTS)
# val: the value string to record; None/empty means a flag/action option
# cnl: set when val continues the previous input-file line (multi-line value)
# lidx/line/infile: input-file context for error reporting; lidx 0 means
#                   the value came from the command line
#
def set_option_value(opt, val = None, cnl = 0, lidx = 0, line = None, infile = None):

    if opt in CMDOPTS and lidx: # in input file, but given on command line already
        if opt not in params: params[opt] = CMDOPTS[opt]
        return

    if val is None: val = ''
    if OPTS[opt][0]&3:
        if OPTS[opt][2]&16:
            # option declared as integer-valued; convert digit strings
            if not val:
                val = 0
            elif re.match(r'^\d+$', val):
                val = int(val)
        elif val and (opt == 'DS' or opt == 'OD'):
            # normalize dataset ids for the dataset options
            val = PgUtil.format_dataset_id(val)

    errmsg = None
    if not cnl and OPTS[opt][0]&3:
        if opt in params:
            if OPTS[opt][0] == 2:
                if OPTS[opt][2]&2: del params[opt] # clean auto set values
            elif params[opt] != val and not OPTS[opt][2]&1:
                errmsg = "'{}', multiple values not allowed for Single-Value Option".format(val)
        if not errmsg and (not PGOPT['CACT'] or OPTS[PGOPT['CACT']][2]):
            # validate single-letter values against the option's allowed set;
            # a leading '!' in the set means the listed letters are forbidden
            dstr = OPTS[opt][3] if len(OPTS[opt]) > 3 else None
            if dstr:
                vlen = len(val)
                ms = re.match(r'^!(\w*)', dstr)
                if ms:
                    dstr = ms.group(1)
                    # BUGFIX: message previously formatted the builtin `str`
                    # instead of the forbidden character set `dstr`
                    if vlen == 1 and dstr.find(val) > -1: errmsg = "{}: character must not be one of '{}'".format(val, dstr)
                elif vlen > 1 or (vlen == 0 and not OPTS[opt][2]&128) or (vlen == 1 and dstr.find(val) < 0):
                    errmsg = "{} single-letter value must be one of '{}'".format(val, dstr)

    if not errmsg:
        if OPTS[opt][0] == 2: # multiple value option
            if opt not in params:
                params[opt] = [val] # set the first value
                if opt == 'QF' and PGOPT['ACTS'] == OPTS['DL'][0]: OPTS['FS'][3] = 'ANT'
            else:
                if cnl:
                    # continuation line: extend the last recorded value
                    rowidx = len(params[opt]) - 1
                    if params[opt][rowidx]:
                        if not re.match(r'^(DE|DI|DM|DW)$', opt):
                            errmsg = "Multi-line value not allowed"
                        else:
                            params[opt][rowidx] += "\n" + val # multiple line value
                    else:
                        params[opt][rowidx] = val
                else:
                    params[opt].append(val) # add next value
        elif OPTS[opt][0] == 1: # single value option
            if cnl and opt in params:
                if val: errmsg = "Multi-line value not allowed"
            elif OPTS[opt][2]&2 and PgUtil.pgcmp(params[opt], val):
                errmsg = "{}: Single-Value Info Option has value '{}' already".format(val, params[opt])
            else:
                params[opt] = val
                OPTS[opt][2] |= 2  # mark value as explicitly set
        elif val:
            if OPTS[opt][0] == 0 and re.match(r'^(Y|N)$', val, re.I):
                # mode options accept Y/N to turn the flag on/off
                params[opt] = 1 if (val == 'Y' or val == 'y') else 0
            else:
                parameter_error(val, opt, lidx, line, infile) # no value for flag or action options
        elif opt not in params:
            params[opt] = 1 # set flag or action option
            if OPTS[opt][0] > 2:
                if PGOPT['ACTS']: parameter_error(opt, "duplicate", lidx, line, infile) # no duplicated action options
                PGOPT['ACTS'] = OPTS[opt][0] # add action bit
                PGOPT['CACT'] = opt # add action name
                if opt == "SB": PGOPT['MSET'] = opt

    if errmsg:
        if lidx:
            input_error(lidx, line, infile, "{}({}) - {}".format(opt, OPTS[opt][1], errmsg))
        else:
            PgLOG.pglog("ERROR: {}({}) - {}".format(opt, OPTS[opt][1], errmsg), PGOPT['extlog'])

    if not lidx: CMDOPTS[opt] = params[opt] # record options set on command lines
1117
#
# get width for a single row if in column format
#
def get_row_width(pgrec):

    dlen = len(params['DV'])
    total = 0
    for key, vals in pgrec.items():
        colwd = 0
        for val in vals:
            if not val: continue
            if not isinstance(val, str): val = str(val)
            # for 'note' only count up to (and including) the first newline
            vlen = val.find('\n') + 1 if key == 'note' else 0
            if vlen < 1: vlen = len(val)
            colwd = max(colwd, vlen)   # max width of this column

        # accumulate all column width plus length of delimiter to get row width
        if total: total += dlen
        total += colwd

    return total
1142
#
# get a short option name by searching dict OPTS and ALIAS
#
def get_short_option(p):

    # a two-letter name may directly be a short option key
    if len(p) == 2:
        p = p.upper()
        if p in OPTS: return p

    # otherwise compare against the registered long names first
    for opt, info in OPTS.items():
        if not PgUtil.pgcmp(info[1], p, 1): return opt

    # and finally against any alias names
    for opt, names in ALIAS.items():
        for name in names:
            if not PgUtil.pgcmp(name, p, 1): return opt

    return None
1161
#
# print result in column format, with multiple values each row
#
def print_column_format(pgrec, flds, hash, lens, retbuf = 0):

    # pgrec: dict of column-name -> list of values (one entry per row)
    # flds: field keys to show, in display order
    # hash: table hash mapping field key -> [option, column-name, ...]
    # lens: per-column display widths (falsy to skip padding)
    # retbuf: when set, return the formatted text instead of writing to OUTPUT
    rowcnt = -1
    colcnt = len(flds)
    buf = ''
    fields = []
    flens = []
    for i in range(colcnt):
        fld = flds[i]
        if fld in hash:
            fld = hash[fld][1]
            # strip a leading "table." qualifier from the column name
            ms = re.search(r'\.(.+)$', fld)
            if ms: fld = ms.group(1)
        if fld in pgrec:
            fields.append(fld)
            flens.append((lens[i] if lens else 0))
            # all columns have the same row count; grab it from the first one
            if rowcnt < 0: rowcnt = len(pgrec[fld])
        else:
            PgLOG.pglog(fld + ": Unkown field name", PGOPT['extlog'])

    colcnt = len(fields)
    for i in range(rowcnt):
        offset = 0
        values = []
        for j in range(colcnt):
            fld = fields[j]
            idx = -1   # >=0 marks a multi-line string value
            val = pgrec[fld][i]
            slen = flens[j]
            if val is None:
                val = ''
            elif isinstance(val, str):
                idx = val.find("\n")
                if idx > 0:
                    # force embedded-newline values to start on a fresh line
                    val = "\n" + val
                    idx = 0
            else:
                val = str(val)
            if slen:
                if idx < 0:
                    val = "{:{}}".format(val, slen)
                else:
                    # multi-line value: pad a blank continuation to the current offset
                    val += "\n{:{}}".format(' ', offset)
            offset += slen
            values.append(val)
        line = params['DV'].join(values) + params['DV'] + "\n"
        if retbuf:
            buf += line
        else:
            OUTPUT.write(line)

    return buf if retbuf else rowcnt
1217
#
# print result in row format, with single value on each row
#
def print_row_format(pgrec, flds, hash):

    for fld in flds:
        if fld not in hash: continue
        # each output line starts with the option's long name plus the ES separator
        line = "{}{}".format(OPTS[hash[fld][0]][1], params['ES'])
        field = hash[fld][1]
        # strip a leading "table." qualifier from the column name
        ms = re.search(r'\.(.+)$', field)
        if ms: field = ms.group(1)
        if field in pgrec and pgrec[field] is not None:
            line += str(pgrec[field])
        OUTPUT.write(line + "\n")
1233
#
# compress/uncompress given files and change the formats accordingly
#
def compress_files(files, formats, count):

    fmtcnt = len(formats)
    if not fmtcnt: return files # just in case

    # option -UZ selects uncompression; otherwise compress
    if 'UZ' in params:
        strcmp, actcmp = 'Uncompress', 0
    else:
        strcmp, actcmp = 'Compress', 1

    s = 's' if count > 1 else ''
    PgLOG.pglog("{}ing {} File{} for {} ...".format(strcmp, count, s, params['DS']), PGOPT['wrnlog'])
    cmpcnt = 0
    for i in range(count):
        # fall back to the first format when no per-file format is given
        fmt = formats[i] if (i < fmtcnt and formats[i]) else formats[0]
        (ofile, fmt) = PgFile.compress_local_file(files[i], fmt, actcmp, PGOPT['extlog'])
        if ofile != files[i]:
            files[i] = ofile
            cmpcnt += 1

    PgLOG.pglog("{}/{} Files {}ed for {}".format(cmpcnt, count, strcmp, params['DS']) , PGOPT['emllog'])

    # the one-shot compression options are consumed now
    params.pop('ZD', None)
    params.pop('UZ', None)

    return files
1263
#
# get hash condition
# tname - table name to identify a table hash
# noand - 1 for not add leading 'AND'
#
def get_hash_condition(tname, include = None, exclude = None, noand = 0):

    tblinfo = TBLHASH[tname]
    condition = ''
    for key, info in tblinfo.items():
        if include and include.find(key) < 0: continue
        if exclude and exclude.find(key) > -1: continue
        opt = info[0]
        if opt not in params: continue # no option value
        flg = info[2]
        if flg < 0: # condition is ignored for this option
            PgLOG.pglog("Condition given per Option -{} (-{}) is ignored".format(opt, OPTS[opt][1]), PGOPT['errlog'])
            continue

        condition += PgDBI.get_field_condition(info[1], params[opt], flg, noand)
        noand = 0   # only the first appended condition may omit the leading AND

    return condition
1289
#
# set default params value for given opt; empty the value if 'all' is given
#
def set_default_value(opt, dval = None):

    flag = OPTS[opt][0]
    if not flag&3: return # skip if not single&multiple value options

    oval = 0
    if opt in params:
        if flag == 1:
            oval = params[opt]
        elif len(params[opt]) > 1:
            return # multiple values given already
        elif len(params[opt]) == 1:
            oval = params[opt][0]

    if oval:
        # 'all' (any case) removes the restriction entirely
        if re.match(r'^all$', oval, re.I): del params[opt]
        return # value given already

    if dval:
        # set default value; multi-value options keep a list
        params[opt] = dval if flag == 1 else [dval]
1320
#
# add/strip COS block for given file name and cosflg if given/not-given cosfile
# return the file size after the conversion
#
def cos_convert(locfile, cosflg, cosfile = None):

    if cosfile:
        cmd = "cosconvert -{} {} {}".format(cosflg, cosfile, locfile)
    else:
        # in-place conversion: lower-case flag, result replaces locfile
        cosfile = locfile
        cmd = "cosconvert -{} {}".format(cosflg.lower(), locfile)

    PgLOG.pgsystem(cmd)
    info = PgFile.check_local_file(cosfile)
    if info: return info['data_size']
    return PgLOG.pglog("Error - " + cmd, PGOPT['errlog']) # should not happen
1339
#
# evaluate count of values for given options
#
def get_option_count(opts):

    # largest value count among the options actually present
    count = max((len(params[opt]) for opt in opts if opt in params), default = 0)
    if count > 0: validate_multiple_options(count, opts)

    return count
1353
#
# gather subgroup indices recursively for given condition
# dcnd: Dataset condition
# pidx: parent group index
# gtype: group type if not empty, P - public groups only)
#
# Return: array reference of group indices
#
def get_all_subgroups(dcnd, pidx, gtype = None):

    gidxs = [pidx]
    gflds = "gindex, grptype" if gtype else "gindex"
    grecs = PgDBI.pgmget("dsgroup", gflds, "{} and pindex = {}".format(dcnd, pidx), PgLOG.LGWNEX)
    if not grecs: return gidxs

    for i, gidx in enumerate(grecs['gindex']):
        # skip self-referencing loops and, when filtering, wrong group types
        if abs(gidx) <= abs(pidx): continue
        if gtype and grecs['grptype'][i] != gtype: continue
        gidxs.extend(get_all_subgroups(dcnd, gidx, gtype))

    return gidxs
1378
#
# gather public subgroup indices recursively for given condition. A group index is
# gathered only if there are data files right under it. The pidx is included too
# if file count of it larger then zero.
# dcnd: Dataset condition
# pidx: parent group index
# cfld: count field (dwebcnt, nwebcnt, savedcnt)
# pfcnt: file count for parent group index pidx 0 to skip)
#
# Return: array reference of group indices
#
def get_data_subgroups(dcnd, pidx, cfld, pfcnt = 0):

    if not pfcnt: # get file count for the parent group
        pfcnt = group_file_count(dcnd, pidx, cfld)
        if not pfcnt: return None

    # find direct child groups that hold at least one counted file
    gflds = "gindex, " + cfld
    gcnd = "{} AND pindex = {} AND {} > 0".format(dcnd, pidx, cfld)
    grecs = PgDBI.pgmget("dsgroup", gflds, gcnd, PgLOG.LGWNEX)
    if not grecs: return ([pidx] if pfcnt > 0 else None)

    gcnt = len(grecs['gindex'])
    gidxs = []
    for i in range(gcnt):
        gidx = grecs['gindex'][i]
        fcnt = grecs[cfld][i]
        if fcnt == 0 or abs(gidx) <= abs(pidx): continue
        subs = get_data_subgroups(dcnd, gidx, cfld, fcnt)
        if subs: gidxs.extend(subs)
        # subtract the child's files; any remainder belongs directly to pidx
        pfcnt -= fcnt
    if pfcnt > 0: gidxs.insert(0, pidx)

    return (gidxs if gidxs else None)
1413
#
# get group file count for given count field name
#
def group_file_count(cnd, gidx, cfld):

    if gidx:
        # a real group index: counts live in dsgroup
        pgrec = PgDBI.pgget("dsgroup", cfld, "{} AND gindex = {}".format(cnd, gidx))
    else:
        # gindex 0: the dataset-level total in dataset
        pgrec = PgDBI.pgget("dataset", cfld, cnd)

    return pgrec[cfld] if pgrec else 0
1427
#
# set file format for actions -AM/-AW from given local files
#
def set_file_format(count):

    if 'LF' not in params: return
    files = params['LF']

    fmts = [None] * count
    fmtcnt = 0
    for i in range(count):
        fmt = PgFile.get_file_format(files[i])
        if fmt:
            fmts[i] = fmt
            fmtcnt += 1

    if fmtcnt:
        # record the detected archive formats and mark them as auto-set
        params['AF'] = fmts
        OPTS['AF'][2] |= 2
1449
#
# get frequency information
#
# Parse a frequency string such as '6H', '2M' or '1M/2' (fraction of month)
# into a 7-element offset list (year, month, day, hour, minute, second,
# month-fraction) suitable for PgUtil.adddatetime().
#
# Returns: (freq, unit) on success; (None, errmsg) on failure, where the
#          second element carries the diagnostic message for the caller
#
def get_control_frequency(frequency):

    val = nf = 0
    unit = None
    ms = re.match(r'^(\d+)([YMWDHNS])$', frequency, re.I)
    if ms:
        val = int(ms.group(1))
        unit = ms.group(2).upper()
    else:
        # fraction-of-month form: '<count>M/<fraction>'
        ms = re.match(r'^(\d+)M/(\d+)', frequency, re.I)
        if ms:
            val = int(ms.group(1))
            nf = int(ms.group(2))
            unit = 'M'
            # a month can only be split into 2, 3, 5, 6 or 10 equal parts
            if nf < 2 or nf > 10 or (30%nf): val = 0

    if not val:
        # BUGFIX: the error text must be returned as the second tuple element;
        # three branches previously assigned it to `val`, losing the message
        if nf:
            unit = "fraction of month frequency '{}' MUST be (2,3,5,6,10)".format(frequency)
        elif unit:
            unit = "frequency '{}' MUST be larger than 0".format(frequency)
        elif re.search(r'/(\d+)$', frequency):
            unit = "fractional frequency '{}' for month ONLY".format(frequency)
        else:
            unit = "invalid frequency '{}', unit must be (Y,M,W,D,H)".format(frequency)
        return (None, unit)

    freq = [0]*7 # initialize the frequency list
    uidx = {'Y' : 0, 'D' : 2, 'H' : 3, 'N' : 4, 'S' : 5}
    if unit == 'M':
        freq[1] = val
        if nf: freq[6] = nf # number of fractions in a month
    elif unit == 'W':
        freq[2] = 7 * val   # weeks expressed in days
    elif unit in uidx:
        freq[uidx[unit]] = val

    return (freq, unit)
1491
#
# check if valid data time for given pindex
#
def valid_data_time(pgrec, cstr = None, logact = 0):

    if pgrec['pindex'] and pgrec['datatime']:
        (freq, unit) = get_control_frequency(pgrec['frequency'])
        if not freq:
            # on parse failure unit carries the diagnostic message
            if cstr: PgLOG.pglog("{}: {}".format(cstr, unit), logact)
            return PgLOG.FAILURE

        # advance datatime by one frequency step and make sure the parent
        # control has caught up to it before this one may proceed
        dtime = PgUtil.adddatetime(pgrec['datatime'], *freq)
        if PgDBI.pgget("dcupdt", "", "cindex = {} AND datatime < '{}'".format(pgrec['pindex'], dtime), PGOPT['extlog']):
            if cstr: PgLOG.pglog("{}: MUST be processed After Control Index {}".format(cstr, pgrec['pindex']), logact)
            return PgLOG.FAILURE

    return PgLOG.SUCCESS
1509
#
# publish filelists for given datasets
#
def publish_dataset_filelist(dsids):

    # shell out once per dataset id
    for dsid in dsids:
        cmd = "publish_filelist " + dsid
        PgLOG.pgsystem(cmd, PGOPT['wrnlog'], 7)
1517
#
# get the current active version index for given dsid
#
def get_version_index(dsid, logact = 0):

    # only one version per dataset carries status 'A' (active)
    cnd = "dsid = '{}' AND status = 'A'".format(dsid)
    pgrec = PgDBI.pgget("dsvrsn", "vindex", cnd, logact)
    return pgrec['vindex'] if pgrec else 0
1526
#
# append given format (data or archive) sfmt to format string sformat
#
def append_format_string(sformat, sfmt, chkend = 0):

    if not sfmt: return sformat
    if not sformat: return sfmt

    # match an existing component either at the very end only (chkend)
    # or anywhere as a dot-delimited component
    mp = r'(^|\.){}$' if chkend else r'(^|\.){}(\.|$)'
    for fmt in sfmt.split('.'):
        if not re.search(mp.format(fmt), sformat, re.I):
            sformat += '.' + fmt

    return sformat
1541
#
# get request type string (idx 0) or shared info flag (idx 1)
#
def request_type(rtype, idx = 0):

    RTYPE = {
        'C' : ["Customized Data", 0],
        'D' : ["CDP Link", 0],
        'M' : ["Delayed Mode Data", 1],
        'N' : ["NCARDAP(THREDDS) Data Server", 0],
        'Q' : ["Database Query", 0],
        'R' : ["Realtime Data", 0],
        'S' : ["Subset Data", 0],
        'T' : ["Subset/Format-Conversion Data", 0],
        'F' : ["Format Conversion Data", 1], # web
        'A' : ["Archive Format Conversion", 1], # web
        'P' : ["Plot Chart", 0],
        'U' : ["Data", 0]
    }

    # unknown request types fall back to the generic 'U' entry
    return RTYPE.get(rtype, RTYPE['U'])[idx]
1565
#
# compose and send (or cache) the email notice for a processed user request
#
# pgrqst: dsrqst record of the request
# errmsg: error message; empty/None means the request built successfully
# fcount: number of output files generated
# rstat: current request status; returned back (changed to 'E' on email failure)
# readyfile: optional path; on successful notice the email body is also written here
# pgpart: partition record when reporting for a single request partition
#
def send_request_email_notice(pgrqst, errmsg, fcount, rstat, readyfile = None, pgpart = None):

    pgcntl = PGOPT['RCNTL']
    # base directory holding the notices/ template files
    rhome = params['WH'] if 'WH' in params and params['WH'] else PgLOG.PGLOG['RQSTHOME']
    # pick the email template name based on the outcome
    if errmsg:
        if pgpart:
            # a partition error may be folded into a previously cached email instead
            if cache_partition_email_error(pgpart['rindex'], errmsg): return rstat
            enote = "email_part_error"
        else:
            enote = "email_error"
    elif fcount == 0:
        if pgcntl and pgcntl['empty_out'] == 'Y':
            enote = "email_empty"
        else:
            # empty output is treated as an error unless explicitly allowed
            errmsg = "NO output data generated"
            if pgpart:
                if cache_partition_email_error(pgpart['rindex'], errmsg): return rstat
                enote = "email_part_error"
            else:
                enote = "email_error"
    elif 'EN' in params and params['EN'][0]:
        enote = params['EN'][0]
    elif pgrqst['enotice']:
        enote = pgrqst['enotice']
    elif pgcntl and pgcntl['enotice']:
        enote = pgcntl['enotice']
    elif pgrqst['globus_transfer'] == 'Y' and pgrqst['task_id']:
        enote = "email_notice_globus"
    else:
        enote = "email_" + ("command" if pgrqst['location'] else "notice")

    # relative template names resolve under <rhome>/notices/
    if enote[0] not in '/.': enote = "{}/notices/{}".format(rhome, enote)

    finfo = PgFile.check_local_file(enote, 128)
    if not finfo:
        if finfo is None:
            ferror = "file not exists"
        else:
            ferror = "Error check file"
    else:
        ef = open(enote, 'r') # open email notice file
        ferror = None

    if ferror:
        if errmsg:
            PgLOG.pglog("{}: {}\nCannot email error to {}@ucar.edu: {}".format(enote, ferror, PgLOG.PGLOG['CURUID'], errmsg),
                        (PGOPT['errlog'] if rstat else PGOPT['extlog']))
            return "E"
        else:
            # fall back to the error template and mark the request as errored
            errmsg = PgLOG.pglog("{}: {}\nCannot email notice to {}".format(enote, ferror, pgrqst['email']), PGOPT['errlog']|PgLOG.RETMSG)
            enote = rhome + "/notices/email_error"
            ef = open(enote, 'r')
            rstat = 'E'

    ebuf = ''
    ebuf += ef.read()
    ef.close()

    # placeholder values substituted into the template buffer below
    einfo = {}
    einfo['HOSTNAME'] = PgLOG.PGLOG['HOSTNAME']
    einfo['DSID'] = pgrqst['dsid']
    einfo['DSSURL'] = PgLOG.PGLOG['DSSURL']
    if pgrqst['location']:
        einfo['WHOME'] = pgrqst['location']
    else:
        einfo['WHOME'] = PgLOG.PGLOG['RQSTURL']
    einfo['SENDER'] = pgrqst['specialist'] + "@ucar.edu"
    einfo['RECEIVER'] = pgrqst['email']
    einfo['RTYPE'] = request_type(pgrqst['rqsttype'])
    PgLOG.add_carbon_copy() # clean carbon copy email in case not empty
    # error notices go to the specialist, success notices to the requester
    exclude = (einfo['SENDER'] if errmsg else einfo['RECEIVER'])
    if not errmsg and pgcntl and pgcntl['ccemail']:
        PgLOG.add_carbon_copy(pgcntl['ccemail'], 1, exclude, pgrqst['specialist'])
    if PgLOG.PGLOG['CURUID'] != pgrqst['specialist'] and PgLOG.PGLOG['CURUID'] != PgLOG.PGLOG['GDEXUSER']:
        PgLOG.add_carbon_copy(PgLOG.PGLOG['CURUID'], 1, exclude)
    if 'CC' in params: PgLOG.add_carbon_copy(params['CC'], 0, exclude)
    einfo['CCD'] = PgLOG.PGLOG['CCDADDR']
    einfo['RINDEX'] = str(pgrqst['rindex'])
    einfo['RQSTID'] = pgrqst['rqstid']
    pgrec = PgDBI.pgget("dataset", "title", "dsid = '{}'".format(pgrqst['dsid']), PGOPT['extlog'])
    einfo['DSTITLE'] = pgrec['title'] if pgrec and pgrec['title'] else ''
    einfo['SUBJECT'] = ''
    if errmsg:
        einfo['ERRMSG'] = PgLOG.get_error_command(int(time.time()), PGOPT['errlog']) + errmsg
        einfo['SUBJECT'] = "Error "
        if pgpart:
            einfo['PARTITION'] = " partition"
            einfo['PTIDX'] = "(PTIDX{})".format(pgpart['pindex'])
            einfo['SUBJECT'] += "Process Partitions of "
        else:
            einfo['PARTITION'] = einfo['PTIDX'] = ''
            einfo['SUBJECT'] += "Build "
        einfo['SUBJECT'] += "{} Rqst{} from {}".format(einfo['RTYPE'], pgrqst['rindex'], pgrqst['dsid'])
    else:
        if fcount == 0:
            einfo['SUBJECT'] += "NO Output:"
        else:
            einfo['SUBJECT'] += "Completed:"
        einfo['DAYS'] = str(PGOPT['VP'])
        # add the specialist's contact details to the notice
        pgrec = PgDBI.pgget("dssgrp", "lstname, fstname, phoneno",
                            "logname = '{}'".format(PgLOG.PGLOG['CURUID']), PGOPT['extlog'])
        if pgrec:
            einfo['SPECIALIST'] = "{} {}".format(pgrec['fstname'], pgrec['lstname'])
            einfo['PHONENO'] = pgrec['phoneno']
        einfo['SUBJECT'] += f" {pgrqst['dsid']} {einfo['RTYPE']} request {pgrqst['rindex']}"

    if pgrqst['note']:
        einfo['RNOTE'] = "\nRequest Detail:\n{}\n".format(pgrqst['note'])
    elif fcount > 0 and pgrqst['rinfo']:
        einfo['RNOTE'] = "\nRequest Detail:\n{}\n".format(pgrqst['rinfo'])
    else:
        einfo['RNOTE'] = ""

    if pgrqst['globus_transfer'] == 'Y' and pgrqst['task_id']:
        einfo['GLOBUS_TASK_URL'] = "https://app.globus.org/activity/" + pgrqst['task_id']

    # substitute each <KEY> placeholder; an empty CCD removes the whole Cc: line
    for ekey in einfo:
        if ekey == 'CCD' and not einfo['CCD']:
            mp = r'Cc:\s*<CCD>\s*'
            rep = ''
        else:
            mp = r'<{}>'.format(ekey)
            rep = einfo[ekey]
        if rep is None:
            PgLOG.pglog("{}.{}: None ekey value for reuqest email".format(pgrqst['rindex'], ekey),
                        PGOPT['wrnlog']|PgLOG.FRCLOG)
            rep = ''
        ebuf = re.sub(mp, rep, ebuf)

    # a batch dscheck run delivers the email under the dscheck record
    if PgLOG.PGLOG['DSCHECK'] and not pgpart:
        tbl = "dscheck"
        cnd = "cindex = {}".format(PgLOG.PGLOG['DSCHECK']['cindex'])
    else:
        tbl = "dsrqst"
        cnd = "rindex = {}".format(pgrqst['rindex'])

    if PgLOG.send_customized_email(f"{tbl}.{cnd}", ebuf, 0):
        if errmsg:
            PgLOG.pglog("Error Email sent to {} for {}.{}:\n{}".format(einfo['SENDER'], tbl, cnd, errmsg), PGOPT['errlog'])
            readyfile = None
        else:
            PgLOG.pglog("{}Email sent to {} for {}.{}\nSubset: {}".format(("Customized " if pgrqst['enotice'] else ""), einfo['RECEIVER'], tbl, cnd, einfo['SUBJECT']),
                        PGOPT['wrnlog']|PgLOG.FRCLOG)
    else:
        # sending failed/deferred: cache the composed email for later delivery
        if not PgDBI.cache_customized_email(tbl, "einfo", cnd, ebuf, 0): return 'E'
        if errmsg:
            PgLOG.pglog("Error Email {} cached to {}.einfo for {}:\n{}".format(einfo['SENDER'], tbl, cnd, errmsg), PGOPT['errlog'])
            readyfile = None
        else:
            PgLOG.pglog("{}Email {} cached to {}.einfo for {}\nSubset: {}".format(("Customized " if pgrqst['enotice'] else ""), einfo['RECEIVER'], tbl, cnd, einfo['SUBJECT']),
                        PGOPT['wrnlog']|PgLOG.FRCLOG)

    if readyfile:
        rf = open(readyfile, 'w')
        rf.write(ebuf)
        rf.close()
        PgFile.set_local_mode(readyfile, 1, PgLOG.PGLOG['FILEMODE'])

    return rstat
1728
#
# cache partition process error to existing email buffer
#
def cache_partition_email_error(ridx, errmsg):

    pkey = "<PARTERR>"
    rcnd = "rindex = {}".format(ridx)
    pgrec = PgDBI.pgget("dsrqst", 'einfo', rcnd, PGOPT['extlog'])
    # nothing to do unless a cached email buffer with the placeholder exists
    if not pgrec or not pgrec['einfo'] or pgrec['einfo'].find(pkey) < 0: return 0

    # insert the error (with command context) and keep the placeholder for more errors
    errmsg = PgLOG.get_error_command(int(time.time()), PGOPT['errlog']) + ("{}\n{}".format(errmsg, pkey))
    pgrec['einfo'] = re.sub(pkey, errmsg, pgrec['einfo'])

    return PgDBI.pgupdt("dsrqst", pgrec, rcnd, PGOPT['extlog'])