gam7 7.14.4-py3-none-any.whl → 7.15.0-py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their public registry and is provided for informational purposes only.
Potentially problematic release: this version of gam7 might be problematic. A note on the dominant change pattern follows the file list below.
- gam/__init__.py +187 -186
- {gam7-7.14.4.dist-info → gam7-7.15.0.dist-info}/METADATA +1 -1
- {gam7-7.14.4.dist-info → gam7-7.15.0.dist-info}/RECORD +6 -6
- {gam7-7.14.4.dist-info → gam7-7.15.0.dist-info}/WHEEL +0 -0
- {gam7-7.14.4.dist-info → gam7-7.15.0.dist-info}/entry_points.txt +0 -0
- {gam7-7.14.4.dist-info → gam7-7.15.0.dist-info}/licenses/LICENSE +0 -0
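Note: apart from the version string bump to 7.15.00, nearly every hunk in gam/__init__.py follows the same pattern: a dictionary-iteration line that appears cut short in the 7.14.4 wheel (ending in "for key, value in" or "sorted(") is restored in 7.15.0 with its explicit .items() or .values() call. The hunks below are reproduced from the registry diff; removed lines are shown exactly as captured, and their apparent truncation may be an artifact of the published 7.14.4 file or of the diff rendering.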
gam/__init__.py CHANGED

@@ -25,7 +25,7 @@ https://github.com/GAM-team/GAM/wiki
 """

 __author__ = 'GAM Team <google-apps-manager@googlegroups.com>'
-__version__ = '7.
+__version__ = '7.15.00'
 __license__ = 'Apache License 2.0 (http://www.apache.org/licenses/LICENSE-2.0)'

 #pylint: disable=wrong-import-position
@@ -2251,10 +2251,10 @@ def getMatchSkipFields(fieldNames):
 return (matchFields, skipFields)

 def checkMatchSkipFields(row, fieldnames, matchFields, skipFields):
-for matchField, matchPattern in
+for matchField, matchPattern in matchFields.items():
 if (matchField not in row) or not matchPattern.search(row[matchField]):
 return False
-for skipField, matchPattern in
+for skipField, matchPattern in skipFields.items():
 if (skipField in row) and matchPattern.search(row[skipField]):
 return False
 if fieldnames and (GC.Values[GC.CSV_INPUT_ROW_FILTER] or GC.Values[GC.CSV_INPUT_ROW_DROP_FILTER]):
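As context for the checkMatchSkipFields change above, here is a minimal, self-contained sketch of dict-driven row filtering in the same spirit; the field names, patterns, and rows are hypothetical and are not taken from GAM.

    import re

    # Hypothetical match/skip dictionaries: a row must match every matchFields
    # pattern and must not match any skipFields pattern.
    matchFields = {'orgUnitPath': re.compile(r'^/Sales')}
    skipFields = {'suspended': re.compile(r'^True$')}

    def keep_row(row):
      for field, pattern in matchFields.items():
        if field not in row or not pattern.search(row[field]):
          return False
      for field, pattern in skipFields.items():
        if field in row and pattern.search(row[field]):
          return False
      return True

    rows = [{'primaryEmail': 'a@example.com', 'orgUnitPath': '/Sales', 'suspended': 'False'},
            {'primaryEmail': 'b@example.com', 'orgUnitPath': '/Sales', 'suspended': 'True'}]
    print([r['primaryEmail'] for r in rows if keep_row(r)])  # ['a@example.com']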
@@ -2695,7 +2695,7 @@ def printKeyValueListWithCount(kvList, i, count):
 writeStdout(formatKeyValueList(Ind.Spaces(), kvList, currentCountNL(i, count)))

 def printKeyValueDict(kvDict):
-for key, value in
+for key, value in kvDict.items():
 writeStdout(formatKeyValueList(Ind.Spaces(), [key, value], '\n'))

 def printKeyValueWithCRsNLs(key, value):
@@ -3532,7 +3532,7 @@ def SetGlobalVariables():
 _printValueError(sectionName, itemName, f'"{filterVal}"', f'{Msg.EXPECTED}: column:filter')
 continue
 filterDict[column] = filterStr
-for column, filterStr in
+for column, filterStr in filterDict.items():
 for c in REGEX_CHARS:
 if c in column:
 columnPat = column
@@ -3767,7 +3767,7 @@ def SetGlobalVariables():
 def _verifyValues(sectionName, inputFilterSectionName, outputFilterSectionName):
 printKeyValueList([Ent.Singular(Ent.SECTION), sectionName]) # Do not use printEntity
 Ind.Increment()
-for itemName, itemEntry in
+for itemName, itemEntry in GC.VAR_INFO.items():
 sectName = sectionName
 if itemName in GC.CSV_INPUT_ROW_FILTER_ITEMS:
 if inputFilterSectionName:
@@ -3778,7 +3778,7 @@ def SetGlobalVariables():
 cfgValue = GM.Globals[GM.PARSER].get(sectName, itemName)
 varType = itemEntry[GC.VAR_TYPE]
 if varType == GC.TYPE_CHOICE:
-for choice, value in
+for choice, value in itemEntry[GC.VAR_CHOICES].items():
 if cfgValue == value:
 cfgValue = choice
 break
@@ -3798,7 +3798,7 @@ def SetGlobalVariables():
 Ind.Decrement()

 def _chkCfgDirectories(sectionName):
-for itemName, itemEntry in
+for itemName, itemEntry in GC.VAR_INFO.items():
 if itemEntry[GC.VAR_TYPE] == GC.TYPE_DIRECTORY:
 dirPath = GC.Values[itemName]
 if (not dirPath) and (itemName in {GC.GMAIL_CSE_INCERT_DIR, GC.GMAIL_CSE_INKEY_DIR}):
@@ -3813,7 +3813,7 @@ def SetGlobalVariables():
 '\n'))

 def _chkCfgFiles(sectionName):
-for itemName, itemEntry in
+for itemName, itemEntry in GC.VAR_INFO.items():
 if itemEntry[GC.VAR_TYPE] == GC.TYPE_FILE:
 fileName = GC.Values[itemName]
 if (not fileName) and (itemName in {GC.EXTRA_ARGS, GC.CMDLOG}):
@@ -3916,18 +3916,18 @@ def SetGlobalVariables():
 GC.Defaults[GC.DRIVE_DIR] = os.path.join(homePath, 'Downloads')
 GM.Globals[GM.GAM_CFG_FILE] = os.path.join(GM.Globals[GM.GAM_CFG_PATH], FN_GAM_CFG)
 if not os.path.isfile(GM.Globals[GM.GAM_CFG_FILE]):
-for itemName, itemEntry in
+for itemName, itemEntry in GC.VAR_INFO.items():
 if itemEntry[GC.VAR_TYPE] == GC.TYPE_DIRECTORY:
 _getDefault(itemName, itemEntry, None)
 oldGamPath = os.environ.get(EV_OLDGAMPATH, GC.Defaults[GC.CONFIG_DIR])
-for itemName, itemEntry in
+for itemName, itemEntry in GC.VAR_INFO.items():
 if itemEntry[GC.VAR_TYPE] != GC.TYPE_DIRECTORY:
 _getDefault(itemName, itemEntry, oldGamPath)
 GM.Globals[GM.PARSER] = configparser.RawConfigParser(defaults=collections.OrderedDict(sorted(list(GC.Defaults.items()), key=lambda t: t[0])))
 _checkMakeDir(GC.CONFIG_DIR)
 _checkMakeDir(GC.CACHE_DIR)
 _checkMakeDir(GC.DRIVE_DIR)
-for itemName, itemEntry in
+for itemName, itemEntry in GC.VAR_INFO.items():
 if itemEntry[GC.VAR_TYPE] == GC.TYPE_FILE:
 srcFile = os.path.expanduser(_stripStringQuotes(GM.Globals[GM.PARSER].get(configparser.DEFAULTSECT, itemName)))
 _copyCfgFile(srcFile, GC.CONFIG_DIR, oldGamPath)
@@ -4035,7 +4035,7 @@ def SetGlobalVariables():
 prevExtraArgsTxt = GC.Values.get(GC.EXTRA_ARGS, None)
 prevOauth2serviceJson = GC.Values.get(GC.OAUTH2SERVICE_JSON, None)
 # Assign global variables, directories, timezone first as other variables depend on them
-for itemName, itemEntry in sorted(
+for itemName, itemEntry in sorted(GC.VAR_INFO.items()):
 varType = itemEntry[GC.VAR_TYPE]
 if varType == GC.TYPE_DIRECTORY:
 GC.Values[itemName] = _getCfgDirectory(sectionName, itemName)
@@ -4043,7 +4043,7 @@ def SetGlobalVariables():
 GC.Values[itemName] = _getCfgTimezone(sectionName, itemName)
 GM.Globals[GM.DATETIME_NOW] = datetime.datetime.now(GC.Values[GC.TIMEZONE])
 # Everything else except row filters
-for itemName, itemEntry in sorted(
+for itemName, itemEntry in sorted(GC.VAR_INFO.items()):
 varType = itemEntry[GC.VAR_TYPE]
 if varType == GC.TYPE_BOOLEAN:
 GC.Values[itemName] = _getCfgBoolean(sectionName, itemName)
@@ -4066,7 +4066,7 @@ def SetGlobalVariables():
 elif varType == GC.TYPE_FILE:
 GC.Values[itemName] = _getCfgFile(sectionName, itemName)
 # Row filters
-for itemName, itemEntry in sorted(
+for itemName, itemEntry in sorted(GC.VAR_INFO.items()):
 varType = itemEntry[GC.VAR_TYPE]
 if varType == GC.TYPE_ROWFILTER:
 GC.Values[itemName] = _getCfgRowFilter(sectionName, itemName)
@@ -4200,7 +4200,7 @@ def SetGlobalVariables():
 # Clear input row filters/limit from parser, children can define but shouldn't inherit global value
 # Clear output header/row filters/limit from parser, children can define or they will inherit global value if not defined
 if GM.Globals[GM.PID] == 0:
-for itemName, itemEntry in sorted(
+for itemName, itemEntry in sorted(GC.VAR_INFO.items()):
 varType = itemEntry[GC.VAR_TYPE]
 if varType in {GC.TYPE_HEADERFILTER, GC.TYPE_HEADERFORCE, GC.TYPE_HEADERORDER, GC.TYPE_ROWFILTER}:
 GM.Globals[GM.PARSER].set(sectionName, itemName, '')
@@ -7264,13 +7264,13 @@ def send_email(msgSubject, msgBody, msgTo, i=0, count=0, clientAccess=False, msg
 return
 toSent = set(recipients.split(','))
 toFailed = {}
-for addr, err in
+for addr, err in result.items():
 if addr in toSent:
 toSent.remove(addr)
 toFailed[addr] = f'{err[0]}: {err[1]}'
 if toSent:
 entityActionPerformed([entityType, ','.join(toSent), Ent.MESSAGE, msgSubject], i, count)
-for addr, errMsg in
+for addr, errMsg in toFailed.items():
 entityActionFailedWarning([entityType, addr, Ent.MESSAGE, msgSubject], errMsg, i, count)

 def cleanAddr(emailAddr):
@@ -7305,7 +7305,7 @@ def send_email(msgSubject, msgBody, msgTo, i=0, count=0, clientAccess=False, msg
 if bccRecipients:
 message['Bcc'] = bccRecipients.lower()
 if msgHeaders:
-for header, value in
+for header, value in msgHeaders.items():
 if header not in {'Subject', 'From', 'To', 'Reply-To', 'Cc', 'Bcc'}:
 message[header] = value
 if mailBox is None:
@@ -8065,7 +8065,7 @@ class CSVPrintFile():
 localUser = localParent = False
 tdfileidLocation = tdparentLocation = tdaddsheetLocation = tdupdatesheetLocation = tduserLocation = Cmd.Location()
 tdsheetLocation = {}
-for sheetEntity in
+for sheetEntity in self.TDSHEET_ENTITY_MAP.values():
 tdsheetLocation[sheetEntity] = Cmd.Location()
 self.todrive = {'user': GC.Values[GC.TODRIVE_USER], 'title': None, 'description': None,
 'sheetEntity': None, 'addsheet': False, 'updatesheet': False, 'sheettitle': None,
@@ -8215,7 +8215,7 @@ class CSVPrintFile():
 throwReasons=GAPI.SHEETS_ACCESS_THROW_REASONS,
 spreadsheetId=self.todrive['fileId'],
 fields='spreadsheetUrl,sheets(properties(sheetId,title),protectedRanges(range(sheetId),requestingUserCanEdit))')
-for sheetEntity in
+for sheetEntity in self.TDSHEET_ENTITY_MAP.values():
 if self.todrive[sheetEntity]:
 sheetId = getSheetIdFromSheetEntity(spreadsheet, self.todrive[sheetEntity])
 if sheetId is None:
@@ -8352,13 +8352,13 @@ class CSVPrintFile():
 elif addPermissionsTitle:
 titles.append(field)
 addPermissionsTitle = False
-for subField in
+for subField in self.driveSubfieldsChoiceMap[field.lower()].values():
 if not isinstance(subField, list):
 titles.append(f'{field}{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}0{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{subField}')
 else:
 titles.extend([f'{field}{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}0{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{subSubField}' for subSubField in subField])
 else:
-for subField in
+for subField in self.driveSubfieldsChoiceMap[field.lower()].values():
 if not isinstance(subField, list):
 titles.append(f'{field}{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{subField}')
 else:
@@ -8759,7 +8759,7 @@ class CSVPrintFile():
 csvFile.seek(0)
 spreadsheet = None
 if self.todrive['updatesheet']:
-for sheetEntity in
+for sheetEntity in self.TDSHEET_ENTITY_MAP.values():
 if self.todrive[sheetEntity]:
 entityValueList = [Ent.USER, user, Ent.SPREADSHEET, title, self.todrive[sheetEntity]['sheetType'], self.todrive[sheetEntity]['sheetValue']]
 if spreadsheet is None:
@@ -9101,7 +9101,7 @@ def cleanJSON(topStructure, listLimit=None, skipObjects=None, timeObjects=None):
 listLen = len(structure)
 listLen = min(listLen, listLimit or listLen)
 return [_clean(v, '', DEFAULT_SKIP_OBJECTS) for v in structure[0:listLen]]
-return {k: _clean(v, k, DEFAULT_SKIP_OBJECTS) for k, v in sorted(
+return {k: _clean(v, k, DEFAULT_SKIP_OBJECTS) for k, v in sorted(structure.items()) if k not in subSkipObjects}

 timeObjects = timeObjects or set()
 return _clean(topStructure, '', DEFAULT_SKIP_OBJECTS.union(skipObjects or set()))
@@ -9136,7 +9136,7 @@ def flattenJSON(topStructure, flattened=None,
 _flatten(structure[i], '', f'{path}{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{i}')
 else:
 if structure:
-for k, v in sorted(
+for k, v in sorted(structure.items()):
 if k not in DEFAULT_SKIP_OBJECTS:
 _flatten(v, k, f'{path}{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{k}')
 else:
@@ -9147,7 +9147,7 @@ def flattenJSON(topStructure, flattened=None,
 timeObjects = timeObjects or set()
 noLenObjects = noLenObjects or set()
 simpleLists = simpleLists or set()
-for k, v in sorted(
+for k, v in sorted(topStructure.items()):
 if k not in allSkipObjects:
 _flatten(v, k, k)
 return flattened
@@ -10025,7 +10025,7 @@ def MultiprocessGAMCommands(items, showCmds):
 while poolProcessResults[0] > 0:
 time.sleep(1)
 completedProcesses = []
-for p, result in
+for p, result in poolProcessResults.items():
 if p != 0 and result.ready():
 poolCallback(result.get())
 completedProcesses.append(p)
@@ -10077,7 +10077,7 @@ def MultiprocessGAMCommands(items, showCmds):
 if parallelPoolProcesses > 0:
 while poolProcessResults[0] == parallelPoolProcesses:
 completedProcesses = []
-for p, result in
+for p, result in poolProcessResults.items():
 if p != 0 and result.ready():
 poolCallback(result.get())
 completedProcesses.append(p)
@@ -10098,7 +10098,7 @@ def MultiprocessGAMCommands(items, showCmds):
 PROCESS_PLURAL_SINGULAR[poolProcessResults[0] == 1],
 Msg.BATCH_CSV_WAIT_LIMIT.format(waitRemaining)))
 completedProcesses = []
-for p, result in
+for p, result in poolProcessResults.items():
 if p != 0 and result.ready():
 poolCallback(result.get())
 completedProcesses.append(p)
@@ -10254,7 +10254,7 @@ def doBatch(threadBatch=False):
 if line.startswith('#'):
 continue
 if kwValues:
-for kw, value in
+for kw, value in kwValues.items():
 line = line.replace(f'%{kw}%', value)
 try:
 argv = shlex.split(line)
@@ -10385,7 +10385,7 @@ def getSubFields(initial_argv, fieldNames):

 def processSubFields(GAM_argv, row, subFields):
 argv = GAM_argv[:]
-for GAM_argvI, fields in
+for GAM_argvI, fields in subFields.items():
 oargv = argv[GAM_argvI][:]
 argv[GAM_argvI] = ''
 pos = 0
@@ -11273,7 +11273,7 @@ def doOAuthInfo():
 if 'expires_in' in token_info:
 printKeyValueList(['Expires', ISOformatTimeStamp((datetime.datetime.now()+datetime.timedelta(seconds=token_info['expires_in'])).replace(tzinfo=GC.Values[GC.TIMEZONE]))])
 if showDetails:
-for k, v in sorted(
+for k, v in sorted(token_info.items()):
 if k not in ['email', 'expires_in', 'issued_to', 'scope']:
 printKeyValueList([k, v])
 printBlankLine()
@@ -12081,7 +12081,7 @@ def doPrintShowProjects():
 if jcount > 0:
 printKeyValueList(['labels', jcount])
 Ind.Increment()
-for k, v in
+for k, v in project['labels'].items():
 printKeyValueList([k, v])
 Ind.Decrement()
 if 'parent' in project:
@@ -12104,7 +12104,7 @@ def doPrintShowProjects():
 if 'condition' in binding:
 printKeyValueList(['condition', ''])
 Ind.Increment()
-for k, v in
+for k, v in binding['condition'].items():
 printKeyValueList([k, v])
 Ind.Decrement()
 Ind.Decrement()
@@ -12145,7 +12145,7 @@ def doPrintShowProjects():
 prow = row.copy()
 prow[f'policy{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}role'] = binding['role']
 if 'condition' in binding:
-for k, v in
+for k, v in binding['condition'].items():
 prow[f'policy{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}condition{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{k}'] = v
 members = binding.get('members', [])
 if not oneMemberPerRow:
@@ -12301,7 +12301,7 @@ def checkServiceAccount(users):
 testDeprecated = 'DEPRECATED'
 if Act.Get() == Act.CHECK:
 if not checkScopesSet:
-for scope in
+for scope in GM.Globals[GM.SVCACCT_SCOPES].values():
 checkScopesSet.update(scope)
 else:
 if not checkScopesSet:
@@ -12508,7 +12508,7 @@ def _showSAKeys(keys, count, currentPrivateKeyId):
 keyName = key.pop('name').rsplit('/', 1)[-1]
 printKeyValueListWithCount(['name', keyName], i, count)
 Ind.Increment()
-for k, v in sorted(
+for k, v in sorted(key.items()):
 if k not in SVCACCT_KEY_TIME_OBJECTS:
 printKeyValueList([k, v])
 else:
@@ -13540,7 +13540,7 @@ def doReportUsage():
 versions = {}
 for version in item['msgValue']:
 versions[version['version_number']] = version['num_devices']
-for k, v in sorted(
+for k, v in sorted(versions.items(), reverse=True):
 title = f'cros:num_devices_chrome_{k}'
 row[title] = v
 else:
@@ -13808,7 +13808,7 @@ def doReport():
 versions = {}
 for version in item['msgValue']:
 versions[version['version_number']] = version['num_devices']
-for k, v in sorted(
+for k, v in sorted(versions.items(), reverse=True):
 title = f'cros:device_version{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{k}'
 csvPF.AddTitles(title)
 row[title] = v
@@ -13817,7 +13817,7 @@ def doReport():
 for subitem in item['msgValue']:
 if 'count' in subitem:
 mycount = myvalue = None
-for key, value in
+for key, value in subitem.items():
 if key == 'count':
 mycount = value
 else:
@@ -13882,7 +13882,7 @@ def doReport():
 for subitem in item['msgValue']:
 if 'count' in subitem:
 mycount = myvalue = None
-for key, value in
+for key, value in subitem.items():
 if key == 'count':
 mycount = value
 else:
@@ -14077,7 +14077,7 @@ def doReport():
 if usageReports and not includeServices:
 includeServices = set(fullDataServices)
 if filterTimes and filters is not None:
-for filterTimeName, filterTimeValue in
+for filterTimeName, filterTimeValue in filterTimes.items():
 filters = filters.replace(f'#{filterTimeName}#', filterTimeValue)
 if not orgUnitId:
 showOrgUnit = False
@@ -14191,9 +14191,9 @@ def doReport():
 if user != 'all' and lastDate is None and GC.Values[GC.CSV_OUTPUT_USERS_AUDIT]:
 csvPF.WriteRowNoFilter({'date': prevTryDate, 'email': user})
 if aggregateByDate:
-for usageDate, events in
+for usageDate, events in eventCounts.items():
 row = {'date': usageDate}
-for event, count in
+for event, count in events.items():
 if convertMbToGb and event.endswith('_in_gb'):
 count = f'{count/1024:.2f}'
 row[event] = count
@@ -14201,11 +14201,11 @@ def doReport():
 csvPF.SortRows('date', False)
 csvPF.writeCSVfile(f'User Reports Aggregate - {tryDate}')
 elif aggregateByUser:
-for email, events in
+for email, events in eventCounts.items():
 row = {'email': email}
 if showOrgUnit:
 row['orgUnitPath'] = userOrgUnits.get(email, UNKNOWN)
-for event, count in
+for event, count in events.items():
 if convertMbToGb and event.endswith('_in_gb'):
 count = f'{count/1024:.2f}'
 row[event] = count
@@ -14376,7 +14376,7 @@ def doReport():
 for mess in message['parameter']:
 value = mess.get('value', ' '.join(mess.get('multiValue', [])))
 parts[mess['name']] = parts.get(mess['name'], [])+[value]
-for part, v in
+for part, v in parts.items():
 if part == 'scope_name':
 part = 'scope'
 event[part] = ' '.join(v)
@@ -14443,20 +14443,20 @@ def doReport():
 csvPF.AddTitles(sorted(addCSVData.keys()))
 if eventCounts:
 if not countsByDate:
-for actor, events in
+for actor, events in eventCounts.items():
 row = {'emailAddress': actor}
 row.update(zeroEventCounts)
-for event, count in
+for event, count in events.items():
 row[event] = count
 if addCSVData:
 row.update(addCSVData)
 csvPF.WriteRowTitles(row)
 else:
-for actor, eventDates in
-for eventDate, events in
+for actor, eventDates in eventCounts.items():
+for eventDate, events in eventDates.items():
 row = {'emailAddress': actor, 'date': eventDate}
 row.update(zeroEventCounts)
-for event, count in
+for event, count in events.items():
 row[event] = count
 if addCSVData:
 row.update(addCSVData)
@@ -14471,7 +14471,7 @@ def doReport():
 if addCSVData:
 csvPF.AddTitles(sorted(addCSVData.keys()))
 if eventCounts:
-for event, count in sorted(
+for event, count in sorted(eventCounts.items()):
 row = {'event': event, 'count': count}
 if addCSVData:
 row.update(addCSVData)
@@ -14756,7 +14756,7 @@ def _getTagReplacementFieldValues(user, i, count, tagReplacements, results=None)
 else:
 results = {'primaryEmail': user}
 userName, domain = splitEmailAddress(user)
-for
+for tag in tagReplacements['tags'].values():
 if tag.get('field'):
 field = tag['field']
 if field == 'primaryEmail':
@@ -18128,7 +18128,7 @@ def doPrintOrgs():
 if showCrOSCounts or showUserCounts:
 if showCrOSCounts:
 total = 0
-for k, v in sorted(
+for k, v in sorted(crosCounts[orgUnitPath].items()):
 row[f'CrOS{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{k}'] = v
 total += v
 row[f'CrOS{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}Total'] = total
@@ -20235,7 +20235,8 @@ def _clearUpdateContacts(updateContacts):
 if not localContactSelects(contactsManager, contactQuery, fields):
 continue
 if updateContacts:
-
+##### Zip
+for field, value in update_fields.items():
 fields[field] = value
 contactEntry = contactsManager.FieldsToContact(fields)
 else:
@@ -22017,7 +22018,7 @@ def _processPersonMetadata(person, parameters):
 person[PEOPLE_UPDATE_TIME] = formatLocalTime(sources[0][PEOPLE_UPDATE_TIME])
 if parameters['sourceTypes']:
 stripKeys = []
-for k, v in
+for k, v in person.items():
 if isinstance(v, list):
 person[k] = []
 for entry in v:
@@ -22032,7 +22033,7 @@ def _processPersonMetadata(person, parameters):
 person.pop(k, None)
 if parameters['strip']:
 person.pop(PEOPLE_METADATA, None)
-for
+for v in person.values():
 if isinstance(v, list):
 for entry in v:
 if isinstance(entry, dict):
@@ -23164,7 +23165,7 @@ PEOPLE_GROUP_TIME_OBJECTS = {'updateTime'}

 def _normalizeContactGroupMetadata(contactGroup):
 normalizedContactGroup = contactGroup.copy()
-for k, v in
+for k, v in normalizedContactGroup.pop('metadata', {}).items():
 normalizedContactGroup[k] = v
 return normalizedContactGroup

@@ -24116,7 +24117,7 @@ def infoCrOSDevices(entityList):
 if up in cros:
 printKeyValueList([up, ''])
 Ind.Increment()
-for key, value in sorted(
+for key, value in sorted(cros[up].items()):
 if key not in CROS_TIME_OBJECTS:
 printKeyValueList([key, value])
 else:
@@ -24391,7 +24392,7 @@ def substituteQueryTimes(queries, queryTimes):
 if queryTimes:
 for i, query in enumerate(queries):
 if query is not None:
-for queryTimeName, queryTimeValue in
+for queryTimeName, queryTimeValue in queryTimes.items():
 query = query.replace(f'#{queryTimeName}#', queryTimeValue)
 queries[i] = query

@@ -24476,7 +24477,7 @@ def doPrintCrOSDevices(entityList=None):
 return
 for attrib in ['diskSpaceUsage', 'osUpdateStatus', 'tpmVersionInfo']:
 if attrib in cros:
-for key, value in sorted(
+for key, value in sorted(cros[attrib].items()):
 attribKey = f'{attrib}{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{key}'
 if key not in CROS_TIME_OBJECTS:
 cros[attribKey] = value
@@ -25649,7 +25650,7 @@ def doPrintShowChromeProfiles():
 else:
 FJQC.GetFormatJSONQuoteChar(myarg, True)
 if filterTimes and cbfilter is not None:
-for filterTimeName, filterTimeValue in
+for filterTimeName, filterTimeValue in filterTimes.items():
 cbfilter = cbfilter.replace(f'#{filterTimeName}#', filterTimeValue)
 fields = getItemFieldsFromFieldsList('chromeBrowserProfiles', fieldsList)
 customerId = _getCustomerId()
@@ -25724,7 +25725,7 @@ def _getChromeProfileNameEntityForCommand(cm, parameters):
 parameters['commandNameList'][i] = f'customers/{customerId}/profiles/{commandName}'
 return
 if parameters['filterTimes']:
-for filterTimeName, filterTimeValue in
+for filterTimeName, filterTimeValue in parameters['filterTimes'].items():
 parameters['cbfilter'] = parameters['cbfilter'].replace(f'#{filterTimeName}#', filterTimeValue)
 printGettingAllAccountEntities(Ent.CHROME_PROFILE, parameters['cbfilter'])
 pageMessage = getPageMessage()
@@ -28652,7 +28653,7 @@ def simplifyChromeSchemaDisplay(schema):
 mfield.pop('number')
 mtypeEntry['field'][mfield.pop('name')] = mfield
 mesgDict[mesgType['name']] = mtypeEntry.copy()
-for
+for mtypeEntry in mesgDict.values():
 for mfieldName, mfield in mtypeEntry['field'].items():
 mfield['descriptions'] = []
 if mfield['type'] == 'TYPE_STRING' and mfield.get('label') == 'LABEL_REPEATED':
@@ -29625,8 +29626,8 @@ def _showChromePolicySchemaStd(schema):
 if vtype == 'TYPE_ENUM':
 enums = mtypeEntry['subtype']['enums']
 descriptions = mtypeEntry['descriptions']
-for i in
-printKeyValueList([f'{
+for i, v in enumerate(enums):
+printKeyValueList([f'{v}', f'{descriptions[i]}'])
 elif vtype == 'TYPE_MESSAGE':
 for mfieldName, mfield in mtypeEntry['subtype']['field'].items():
 # managedBookmarks is recursive
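The hunk above replaces an index-only loop with enumerate() so each enum value is printed next to the description at the same index. A minimal sketch of that pairing, with hypothetical enum data rather than an actual Chrome policy schema:

    enums = ['ALLOW', 'BLOCK', 'UNSET']
    descriptions = ['Always allow', 'Always block', 'Use the platform default']
    for i, v in enumerate(enums):
      print(f'{v}: {descriptions[i]}')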
@@ -29639,7 +29640,7 @@ def _showChromePolicySchemaStd(schema):

 printKeyValueList([f'{schema.get("name")}', f'{schema.get("description")}'])
 Ind.Increment()
-for
+for mtypeEntry in schema['settings'].values():
 if mtypeEntry['subfield']:
 continue
 for mfieldName, mfield in mtypeEntry['field'].items():
@@ -29678,7 +29679,7 @@ def doShowChromePolicySchemasStd(cp):
 for schema in result:
 schema_name, schema_dict = simplifyChromeSchemaDisplay(schema)
 schemas[schema_name.lower()] = schema_dict
-for _, schema in sorted(
+for _, schema in sorted(schemas.items()):
 _showChromePolicySchemaStd(schema)
 printBlankLine()

@@ -30036,11 +30037,11 @@ def doSyncCIDevices():
 if last_sync == NEVER_TIME_NOMS:
 remoteDeviceMap[sndt]['unassigned'] = True
 devicesToAdd = []
-for sndt, device in
+for sndt, device in localDevices.items():
 if sndt not in remoteDevices:
 devicesToAdd.append(device)
 missingDevices = []
-for sndt, device in
+for sndt, device in remoteDevices.items():
 if sndt not in localDevices:
 missingDevices.append(device)
 Act.Set([Act.CREATE, Act.CREATE_PREVIEW][preview])
@@ -30337,10 +30338,10 @@ def doPrintCIDevices():
 if mg:
 du = mg.group(1)
 state_name = mg.group(2)
-for
-if
-
-
+for deviceUser in deviceUsers:
+if deviceUser['name'] == du:
+deviceUser.setdefault('clientstates', {})
+deviceUser['clientstates'][state_name] = state
 break
 for deviceUser in deviceUsers:
 mg = DEVICE_USERNAME_PATTERN.match(deviceUser['name'])
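The restored block above uses setdefault() to create the 'clientstates' dictionary on first use before attaching a named state to the matching device user. A minimal sketch of that pattern, with hypothetical device data rather than Cloud Identity API output:

    deviceUsers = [{'name': 'devices/d1/deviceUsers/u1'},
                   {'name': 'devices/d1/deviceUsers/u2'}]
    du, state_name, state = 'devices/d1/deviceUsers/u1', 'chrome', {'healthScore': 'OK'}
    for deviceUser in deviceUsers:
      if deviceUser['name'] == du:
        deviceUser.setdefault('clientstates', {})
        deviceUser['clientstates'][state_name] = state
        break
    print(deviceUsers[0])  # first device user now carries a 'clientstates' entry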
@@ -32748,7 +32749,7 @@ def doCreateGroup(ciGroupsAPI=False):
 entityActionNotPerformedWarning([Ent.GROUP, groupEmail], Msg.EMAIL_ADDRESS_IS_UNMANAGED_ACCOUNT)
 return
 if ciGroupsAPI:
-for k, v in
+for k, v in GROUP_CIGROUP_FIELDS_MAP.items():
 if k in gs_body:
 body[v] = gs_body.pop(k)
 body.setdefault('displayName', groupEmail)
@@ -33262,7 +33263,7 @@ def doUpdateGroups():
 gs = buildGAPIObject(API.GROUPSSETTINGS)
 gs_body = getSettingsFromGroup(cd, ','.join(entityList), gs, gs_body)
 if ci_body:
-for k, v in
+for k, v in GROUP_CIGROUP_FIELDS_MAP.items():
 if k in gs_body:
 ci_body[v] = gs_body.pop(k)
 if gs_body:
@@ -33896,7 +33897,7 @@ def _showCIGroup(group, groupEmail, i=0, count=0):
 continue
 value = group[key]
 if key == 'labels':
-for k, v in
+for k, v in value.items():
 if v == '':
 value[k] = True
 if isinstance(value, (list, dict)):
@@ -34099,7 +34100,7 @@ def infoGroups(entityList):
 else:
 printKeyValueWithCRsNLs(key, value)
 if settings:
-for
+for _, attr in sorted(GROUP_SETTINGS_ATTRIBUTES.items()):
 key = attr[0]
 if key in settings:
 if key not in GROUP_FIELDS_WITH_CRS_NLS:
@@ -34114,7 +34115,7 @@ def infoGroups(entityList):
 else:
 showTitle = True
 if showDeprecatedAttributes:
-for
+for _, subattr in sorted(GROUP_MERGED_TO_COMPONENT_MAP[key].items()):
 subkey = subattr[0]
 if subkey in settings:
 if showTitle:
@@ -34126,7 +34127,7 @@ def infoGroups(entityList):
 Ind.Decrement()
 if showDeprecatedAttributes:
 showTitle = True
-for
+for _, attr in sorted(GROUP_DEPRECATED_ATTRIBUTES.items()):
 subkey = attr[0]
 if subkey in settings:
 if showTitle:
@@ -34246,23 +34247,23 @@ def clearUnneededGroupMatchPatterns(matchPatterns):
 matchPatterns.pop(field, None)

 def checkGroupMatchPatterns(groupEmail, group, matchPatterns):
-for field,
+for field, matchp in matchPatterns.items():
 if field == 'email':
-if not
-if not
+if not matchp['not']:
+if not matchp['pattern'].match(groupEmail):
 return False
 else:
-if
+if matchp['pattern'].match(groupEmail):
 return False
 elif field == 'adminCreated':
-if
+if matchp != group[field]:
 return False
 else: # field in {'name', 'displayName', 'description'}:
-if not
-if not
+if not matchp['not']:
+if not matchp['pattern'].match(group[field]):
 return False
 else:
-if
+if matchp['pattern'].match(group[field]):
 return False
 return True

@@ -34416,11 +34417,11 @@ def doPrintGroups():
 if matchSettings:
 if not isinstance(groupSettings, dict):
 return
-for key,
+for key, matchp in matchSettings.items():
 gvalue = groupSettings.get(key)
-if
+if matchp['notvalues'] and gvalue in matchp['notvalues']:
 return
-if
+if matchp['values'] and gvalue not in matchp['values']:
 return
 if showOwnedBy and not checkGroupShowOwnedBy(showOwnedBy, groupMembers):
 return
@@ -34458,7 +34459,7 @@ def doPrintGroups():
 addMemberInfoToRow(row, groupMembers, typesSet, memberOptions, memberDisplayOptions, delimiter,
 isSuspended, isArchived, False)
 if isinstance(groupSettings, dict):
-for key, value in
+for key, value in groupSettings.items():
 if key not in {'kind', 'etag', 'email', 'name', 'description'}:
 if value is None:
 value = ''
@@ -34469,12 +34470,12 @@ def doPrintGroups():
 row[key] = value
 groupCloudEntity = ciGroups.get(row['email'], {})
 if groupCloudEntity:
-for k, v in
+for k, v in groupCloudEntity.pop('labels', {}).items():
 if v == '':
 groupCloudEntity[f'labels{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{k}'] = True
 else:
 groupCloudEntity[f'labels{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{k}'] = v
-for key, value in sorted(
+for key, value in sorted(flattenJSON({'cloudIdentity': groupCloudEntity}, flattened={}, timeObjects=CIGROUP_TIME_OBJECTS).items()):
 csvPF.AddTitles(key)
 row[key] = value
 csvPF.WriteRow(row)
@@ -34618,11 +34619,11 @@ def doPrintGroups():
 elif myarg == 'maxresults':
 maxResults = getInteger(minVal=1, maxVal=200)
 elif myarg == 'nodeprecated':
-deprecatedAttributesSet.update([attr[0] for attr in
-deprecatedAttributesSet.update([attr[0] for attr in
-deprecatedAttributesSet.update([attr[0] for attr in
-deprecatedAttributesSet.update([attr[0] for attr in
-deprecatedAttributesSet.update([attr[0] for attr in
+deprecatedAttributesSet.update([attr[0] for attr in GROUP_DISCOVER_ATTRIBUTES.values()])
+deprecatedAttributesSet.update([attr[0] for attr in GROUP_ASSIST_CONTENT_ATTRIBUTES.values()])
+deprecatedAttributesSet.update([attr[0] for attr in GROUP_MODERATE_CONTENT_ATTRIBUTES.values()])
+deprecatedAttributesSet.update([attr[0] for attr in GROUP_MODERATE_MEMBERS_ATTRIBUTES.values()])
+deprecatedAttributesSet.update([attr[0] for attr in GROUP_DEPRECATED_ATTRIBUTES.values()])
 elif myarg in {'convertcrnl', 'converttextnl', 'convertfooternl'}:
 convertCRNL = True
 elif myarg == 'delimiter':
@@ -34670,7 +34671,7 @@ def doPrintGroups():
 valueList = getChoice({'not': 'notvalues'}, mapChoice=True, defaultChoice='values')
 matchBody = {}
 getGroupAttrValue(getString(Cmd.OB_FIELD_NAME).lower(), matchBody)
-for key, value in
+for key, value in matchBody.items():
 matchSettings.setdefault(key, {'notvalues': [], 'values': []})
 matchSettings[key][valueList].append(value)
 elif getPGGroupRolesMemberDisplayOptions(myarg, rolesSet, memberDisplayOptions):
@@ -34851,13 +34852,13 @@ def doPrintGroups():
 if sortHeaders:
 sortTitles = ['email']+GROUP_INFO_PRINT_ORDER+['aliases', 'nonEditableAliases']
 if getSettings:
-sortTitles += sorted([attr[0] for attr in
+sortTitles += sorted([attr[0] for attr in GROUP_SETTINGS_ATTRIBUTES.values()])
 for key in GROUP_MERGED_ATTRIBUTES_PRINT_ORDER:
 sortTitles.append(key)
 if not deprecatedAttributesSet:
-sortTitles += sorted([attr[0] for attr in
+sortTitles += sorted([attr[0] for attr in GROUP_MERGED_TO_COMPONENT_MAP[key].values()])
 if not deprecatedAttributesSet:
-sortTitles += sorted([attr[0] for attr in
+sortTitles += sorted([attr[0] for attr in GROUP_DEPRECATED_ATTRIBUTES.values()])
 if rolesSet:
 setMemberDisplaySortTitles(memberDisplayOptions, sortTitles)
 csvPF.SetSortTitles(sortTitles)
@@ -35848,7 +35849,7 @@ def doUpdateCIGroups():
 elif myarg in ['memberrestriction', 'memberrestrictions']:
 query = getString(Cmd.OB_QUERY, minLen=0)
 member_types = {'USER': '1', 'SERVICE_ACCOUNT': '2', 'GROUP': '3',}
-for key, val in
+for key, val in member_types.items():
 query = query.replace(key, val)
 se_body['memberRestriction'] = {'query': query}
 elif myarg == 'locked':
@@ -35864,7 +35865,7 @@ def doUpdateCIGroups():
 if gs_body:
 gs = buildGAPIObject(API.GROUPSSETTINGS)
 gs_body = getSettingsFromGroup(cd, ','.join(entityList), gs, gs_body)
-for k, v in
+for k, v in GROUP_CIGROUP_FIELDS_MAP.items():
 if k in gs_body:
 ci_body[v] = gs_body.pop(k)
 if gs_body:
@@ -36817,12 +36818,12 @@ def doPrintCIGroups():
 csvPF.WriteRowNoFilter(row)
 return
 mapCIGroupFieldNames(groupEntity)
-for k, v in
+for k, v in groupEntity.pop('labels', {}).items():
 if v == '':
 groupEntity[f'labels{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{k}'] = True
 else:
 groupEntity[f'labels{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{k}'] = v
-for key, value in sorted(
+for key, value in sorted(flattenJSON(groupEntity, flattened={}, timeObjects=CIGROUP_TIME_OBJECTS).items()):
 csvPF.AddTitles(key)
 row[key] = value
 if rolesSet and groupMembers is not None:
@@ -38058,7 +38059,7 @@ def _getBuildingByNameOrId(cd, minLen=1, allowNV=False):
 # No exact name match, check for case insensitive name matches
 which_building_lower = which_building.lower()
 ci_matches = []
-for buildingName, buildingId in
+for buildingName, buildingId in GM.Globals[GM.MAP_BUILDING_NAME_TO_ID].items():
 if buildingName.lower() == which_building_lower:
 ci_matches.append({'buildingName': buildingName, 'buildingId': buildingId})
 # One match, return ID
@@ -39436,7 +39437,7 @@ def _getCalendarEventAttribute(myarg, body, parameters, function):
 body.pop(field, None)

 def clearJSONsubfields(body, clearFields):
-for field, subfields in
+for field, subfields in clearFields.items():
 if field in body:
 if isinstance(body[field], list):
 for item in body[field]:
@@ -41722,7 +41723,7 @@ def _validateVaultQuery(body, corpusArgumentMap):
 if 'searchMethod' not in body['query']:
 missingArgumentExit(formatChoiceList(VAULT_SEARCH_METHODS_MAP))
 if 'exportOptions' in body:
-for corpus, options in
+for corpus, options in VAULT_CORPUS_OPTIONS_MAP.items():
 if body['query']['corpus'] != corpus:
 body['exportOptions'].pop(options, None)

@@ -45008,7 +45009,7 @@ def _filterSchemaFields(userEntity, schemaParms):
 customSchemas = {}
 for schema in sorted(schemas):
 if schema in schemaParms['selectedSchemaFields']:
-for field, value in sorted(
+for field, value in sorted(schemas[schema].items()):
 if field not in schemaParms['selectedSchemaFields'][schema]:
 continue
 customSchemas.setdefault(schema, {})
@@ -45952,7 +45953,7 @@ def doPrintUsers(entityList=None):
 if sortRows and orderBy == 'email':
 csvPF.SortRows('primaryEmail', reverse=sortOrder == 'DESCENDING')
 elif not FJQC.formatJSON:
-for domain, count in sorted(
+for domain, count in sorted(domainCounts.items()):
 csvPF.WriteRowNoFilter({'domain': domain, 'count': count})
 else:
 csvPF.WriteRowNoFilter({'JSON': json.dumps(cleanJSON(domainCounts), ensure_ascii=False, sort_keys=True)})
@@ -46047,7 +46048,7 @@ def doPrintUserCountsByOrgUnit():
 userCounts[orgUnitPath]['active'] += 1
 userCounts[orgUnitPath]['total'] += 1
 totalCounts = USER_COUNTS_ZERO_FIELDS.copy()
-for k, v in sorted(
+for k, v in sorted(userCounts.items()):
 _printUserCounts(k, v)
 for f in USER_COUNTS_FIELDS:
 totalCounts[f] += v[f]
@@ -51856,7 +51857,7 @@ def transferCalendars(users):
 body[field] = updateBody[field]
 else:
 body = {}
-for field, updateField in
+for field, updateField in updateBody.items():
 if field not in appendFieldsList:
 body[field] = updateField
 callGAPI(targetCal.calendars(), 'patch',
@@ -52202,7 +52203,7 @@ def updateCalendarAttendees(users):
 entityActionNotPerformedWarning([Ent.EVENT, eventSummary], Msg.USER_IS_NOT_ORGANIZER, k, kcount)
 continue
 needsUpdate = False
-for _, v in sorted(
+for _, v in sorted(attendeeMap.items()):
 v['done'] = False
 updatedAttendeesAdd = []
 updatedAttendeesRemove = []
@@ -52258,7 +52259,7 @@ def updateCalendarAttendees(users):
 entityPerformActionModifierNewValue([Ent.EVENT, eventSummary, Ent.ATTENDEE, oldAddr], Act.MODIFIER_WITH, update['email'], u, ucount)
 updatedAttendeesAdd.append(attendee)
 needsUpdate = True
-for newAddr, v in sorted(
+for newAddr, v in sorted(attendeeMap.items()):
 if v['op'] == 'add' and not v['done']:
 u += 1
 v['done'] = True
@@ -52271,7 +52272,7 @@ def updateCalendarAttendees(users):
 entityPerformAction([Ent.EVENT, eventSummary, Ent.ATTENDEE, newAddr], u, ucount)
 updatedAttendeesAdd.append(attendee)
 needsUpdate = True
-for newAddr, v in sorted(
+for newAddr, v in sorted(attendeeMap.items()):
 if not v['done']:
 u += 1
 Act.Set(Act.SKIP)
@@ -54088,7 +54089,7 @@ def printDriveActivity(users):
 if isinstance(v, (dict, list)):
 _updateKnownUsers(v)
 elif isinstance(structure, dict):
-for k, v in sorted(
+for k, v in sorted(structure.items()):
 if k != 'knownUser':
 if isinstance(v, (dict, list)):
 _updateKnownUsers(v)
@@ -54335,7 +54336,7 @@ def printShowDriveSettings(users):
 if title in fieldsList and title in feed:
 printKeyValueList([title, None])
 Ind.Increment()
-for item, value in sorted(
+for item, value in sorted(feed[title].items()):
 printKeyValueList([item, delimiter.join(value)])
 Ind.Decrement()

@@ -54351,7 +54352,7 @@ def printShowDriveSettings(users):
 jcount = len(feed[title])
 row[title] = jcount
 j = 0
-for item, value in sorted(
+for item, value in sorted(feed[title].items()):
 row[f'{title}{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{j:02d}{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{item}'] = delimiter.join(value)
 j += 1

@@ -54429,7 +54430,7 @@ def printShowDriveSettings(users):
 if 'maxImportSizes' in fieldsList and 'maxImportSizes' in fieldsList:
 printKeyValueList(['maxImportSizes', None])
 Ind.Increment()
-for setting, value in
+for setting, value in feed['maxImportSizes'].items():
 printKeyValueList([setting, formatFileSize(int(value))])
 Ind.Decrement()
 if 'driveThemes' in fieldsList and 'driveThemes' in feed:
@@ -54451,7 +54452,7 @@ def printShowDriveSettings(users):
 jcount = len(feed['maxImportSizes'])
 row['maxImportSizes'] = jcount
 j = 0
-for setting, value in
+for setting, value in feed['maxImportSizes'].items():
 row[f'maxImportSizes{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{j}{GC.Values[GC.CSV_OUTPUT_SUBFIELD_DELIMITER]}{setting}'] = formatFileSize(int(value))
 j += 1
 if 'driveThemes' in fieldsList and 'driveThemes' in feed:
@@ -54540,7 +54541,7 @@ def getFilePaths(drive, fileTree, initialResult, filePathInfo, addParentsToTree=
 paths[parentId][lparentId] = filePathInfo['allPaths'][lparentId]

 def _makeFilePaths(localPaths, fplist, filePaths, name, maxDepth):
-for k, v in
+for k, v in localPaths.items():
 fplist.append(filePathInfo['ids'].get(k, ''))
 if not v:
 fp = fplist[:]
@@ -54646,28 +54647,28 @@ def _mapDriveProperties(f_file):
 properties = f_file.pop('properties', [])
 if appProperties:
 f_file.setdefault('properties', [])
-for key, value in sorted(
+for key, value in sorted(appProperties.items()):
 f_file['properties'].append({'key': key, 'value': value, 'visibility': 'PRIVATE'})
 if properties:
 f_file.setdefault('properties', [])
-for key, value in sorted(
+for key, value in sorted(properties.items()):
 f_file['properties'].append({'key': key, 'value': value, 'visibility': 'PUBLIC'})

 def _mapDriveFieldNames(f_file, user, parentsSubFields, mapToLabels):
 if mapToLabels:
-for attrib, v2attrib in
+for attrib, v2attrib in API.DRIVE3_TO_DRIVE2_LABELS_MAP.items():
 if attrib in f_file:
 f_file.setdefault('labels', {})
 f_file['labels'][v2attrib] = f_file.pop(attrib)
-for attrib, v2attrib in
+for attrib, v2attrib in API.DRIVE3_TO_DRIVE2_FILES_FIELDS_MAP.items():
 if attrib in f_file:
 f_file[v2attrib] = f_file.pop(attrib)
 capabilities = f_file.get('capabilities')
 if capabilities:
-for attrib, v2attrib in
+for attrib, v2attrib in API.DRIVE3_TO_DRIVE2_CAPABILITIES_FIELDS_MAP.items():
 if attrib in capabilities:
 f_file[v2attrib] = capabilities[attrib]
-for attrib, v2attrib in
+for attrib, v2attrib in API.DRIVE3_TO_DRIVE2_CAPABILITIES_NAMES_MAP.items():
 if attrib in capabilities:
 capabilities[v2attrib] = capabilities.pop(attrib)
 if 'spaces' in f_file:
@@ -54691,7 +54692,7 @@ def _mapDriveFieldNames(f_file, user, parentsSubFields, mapToLabels):
 _mapDrivePermissionNames(permission)

 def _mapDriveRevisionNames(revision):
-for attrib, v2attrib in
+for attrib, v2attrib in API.DRIVE3_TO_DRIVE2_REVISIONS_FIELDS_MAP.items():
 if attrib in revision:
 revision[v2attrib] = revision.pop(attrib)
 if 'lastModifyingUser' in revision:
@@ -56135,7 +56136,7 @@ class PermissionMatch():
 @staticmethod
 def CheckPermissionMatch(permission, permissionMatch):
 match = False
-for field, value in
+for field, value in permissionMatch[1].items():
 if field in {'type', 'role'}:
 if permission.get(field, '') not in value:
 break
@@ -56434,7 +56435,7 @@ class DriveListParameters():
 if self.excludeTrashed:
 self.AppendToQuery('trashed=false')
 if self.fileIdEntity['query']:
-for queryTimeName, queryTimeValue in
+for queryTimeName, queryTimeValue in self.queryTimes.items():
 self.fileIdEntity['query'] = self.fileIdEntity['query'].replace(f'#{queryTimeName}#', queryTimeValue)
 self.fileIdEntity['query'] = _mapDrive2QueryToDrive3(self.fileIdEntity['query'])
 if not fileIdEntity.get('shareddrive'):
@@ -56749,7 +56750,7 @@ def printFileList(users):

 def writeMimeTypeCountsRow(user, sourceId, sourceName, mimeTypeInfo):
 countTotal = sizeTotal = 0
-for mtinfo in
+for mtinfo in mimeTypeInfo.values():
 countTotal += mtinfo['count']
 sizeTotal += mtinfo['size']
 row = {'Owner': user, 'Total': countTotal}
@@ -56760,7 +56761,7 @@ def printFileList(users):
 row['Size'] = sizeTotal
 if addCSVData:
 row.update(addCSVData)
-for mimeType, mtinfo in sorted(
+for mimeType, mtinfo in sorted(mimeTypeInfo.items()):
 row[f'{mimeType}'] = mtinfo['count']
 if showMimeTypeSize:
 row[f'{mimeType}:Size'] = mtinfo['size']
@@ -56962,7 +56963,7 @@ def printFileList(users):
 fileNameTitle = 'name'
 csvPF.RemoveTitles(['capabilities'])
 if DLP.queryTimes and selectSubQuery:
-for queryTimeName, queryTimeValue in
+for queryTimeName, queryTimeValue in DLP.queryTimes.items():
 selectSubQuery = selectSubQuery.replace(f'#{queryTimeName}#', queryTimeValue)
 selectSubQuery = _mapDrive2QueryToDrive3(selectSubQuery)
 if addCSVData:
@@ -57061,7 +57062,7 @@ def printFileList(users):
 if incrementalPrint:
 if countsOnly:
 if summary != FILECOUNT_SUMMARY_NONE:
-for mimeType, mtinfo in
+for mimeType, mtinfo in mimeTypeInfo.items():
 summaryMimeTypeInfo.setdefault(mimeType, {'count': 0, 'size': 0})
 summaryMimeTypeInfo[mimeType]['count'] += mtinfo['count']
 summaryMimeTypeInfo[mimeType]['size'] += mtinfo['size']
@@ -57141,7 +57142,7 @@ def printFileList(users):
 if countsOnly:
 if showSource:
 if summary != FILECOUNT_SUMMARY_NONE:
-for mimeType, mtinfo in
+for mimeType, mtinfo in mimeTypeInfo.items():
 summaryMimeTypeInfo.setdefault(mimeType, {'count': 0, 'size': 0})
 summaryMimeTypeInfo[mimeType]['count'] += mtinfo['count']
 summaryMimeTypeInfo[mimeType]['size'] += mtinfo['size']
@@ -57150,7 +57151,7 @@ def printFileList(users):
 if countsOnly:
 if not showSource:
 if summary != FILECOUNT_SUMMARY_NONE:
-for mimeType, mtinfo in
+for mimeType, mtinfo in mimeTypeInfo.items():
 summaryMimeTypeInfo.setdefault(mimeType, {'count': 0, 'size': 0})
 summaryMimeTypeInfo[mimeType]['count'] += mtinfo['count']
 summaryMimeTypeInfo[mimeType]['size'] += mtinfo['size']
@@ -57703,14 +57704,14 @@ def printShowFileCounts(users):
 def showMimeTypeInfo(user, mimeTypeInfo, sharedDriveId, sharedDriveName, lastModification, i, count):
 if summary != FILECOUNT_SUMMARY_NONE:
 if count != 0:
-for mimeType, mtinfo in
+for mimeType, mtinfo in mimeTypeInfo.items():
 summaryMimeTypeInfo.setdefault(mimeType, {'count': 0, 'size': 0})
 summaryMimeTypeInfo[mimeType]['count'] += mtinfo['count']
 summaryMimeTypeInfo[mimeType]['size'] += mtinfo['size']
 if summary == FILECOUNT_SUMMARY_ONLY:
 return
 countTotal = sizeTotal = 0
-for mtinfo in
+for mtinfo in mimeTypeInfo.values():
 countTotal += mtinfo['count']
 sizeTotal += mtinfo['size']
 if not csvPF:
@@ -57727,7 +57728,7 @@ def printShowFileCounts(users):
 Ind.Increment()
 if showLastModification:
 _showLastModification(lastModification)
-for mimeType, mtinfo in sorted(
+for mimeType, mtinfo in sorted(mimeTypeInfo.items()):
 if not showMimeTypeSize:
 printKeyValueList([mimeType, mtinfo['count']])
 else:
@@ -57744,7 +57745,7 @@ def printShowFileCounts(users):
 _updateLastModificationRow(row, lastModification)
 if addCSVData:
 row.update(addCSVData)
-for mimeType, mtinfo in sorted(
+for mimeType, mtinfo in sorted(mimeTypeInfo.items()):
 row[f'{mimeType}'] = mtinfo['count']
 if showMimeTypeSize:
 row[f'{mimeType}:Size'] = mtinfo['size']
@@ -58250,14 +58251,14 @@ def printShowFileShareCounts(users):
|
|
|
58250
58251
|
def showShareCounts(user, shareCounts, i, count):
|
|
58251
58252
|
if summary != FILECOUNT_SUMMARY_NONE:
|
|
58252
58253
|
if count != 0:
|
|
58253
|
-
for field, shareCount in
|
|
58254
|
+
for field, shareCount in shareCounts.items():
|
|
58254
58255
|
summaryShareCounts[field] += shareCount
|
|
58255
58256
|
if summary == FILECOUNT_SUMMARY_ONLY:
|
|
58256
58257
|
return
|
|
58257
58258
|
if not csvPF:
|
|
58258
58259
|
printEntity([Ent.USER, user, Ent.DRIVE_FILE_OR_FOLDER, shareCounts[FILESHARECOUNTS_TOTAL]], i, count)
|
|
58259
58260
|
Ind.Increment()
|
|
58260
|
-
for field, shareCount in
|
|
58261
|
+
for field, shareCount in shareCounts.items():
|
|
58261
58262
|
printKeyValueList([field, shareCount])
|
|
58262
58263
|
Ind.Decrement()
|
|
58263
58264
|
else:
|
|
@@ -59922,7 +59923,7 @@ def _copyPermissions(drive, user, i, count, j, jcount,
|
|
|
59922
59923
|
|
|
59923
59924
|
def getNonInheritedPermissions(permissions):
|
|
59924
59925
|
nonInheritedPermIds = set()
|
|
59925
|
-
for permissionId, permission in
|
|
59926
|
+
for permissionId, permission in permissions.items():
|
|
59926
59927
|
if not permission['inherited']:
|
|
59927
59928
|
nonInheritedPermIds.add(permissionId)
|
|
59928
59929
|
return nonInheritedPermIds
|
|
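
Illustrative note: getNonInheritedPermissions above collects the IDs of permissions set directly on a file. A standalone sketch of that filter, with invented permission IDs:

def get_non_inherited_permission_ids(permissions):
    """Return the IDs of permissions that are not inherited from a parent,
    mirroring the permissions.items() loop in getNonInheritedPermissions."""
    non_inherited = set()
    for permission_id, permission in permissions.items():
        if not permission['inherited']:
            non_inherited.add(permission_id)
    return non_inherited

perms = {'anyoneWithLink': {'inherited': True},
         '0123456789': {'inherited': False}}
print(get_non_inherited_permission_ids(perms))  # {'0123456789'}
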
@@ -59946,7 +59947,7 @@ def _copyPermissions(drive, user, i, count, j, jcount,
|
|
|
59946
59947
|
copySourcePerms = {}
|
|
59947
59948
|
deleteTargetPermIds = set()
|
|
59948
59949
|
updateTargetPerms = {}
|
|
59949
|
-
for permissionId, permission in
|
|
59950
|
+
for permissionId, permission in sourcePerms.items():
|
|
59950
59951
|
kvList = permissionKVList(user, entityType, newFileTitle, permission)
|
|
59951
59952
|
if isPermissionCopyable(kvList, permission):
|
|
59952
59953
|
copySourcePerms[permissionId] = permission
|
|
@@ -60000,7 +60001,7 @@ def _copyPermissions(drive, user, i, count, j, jcount,
|
|
|
60000
60001
|
Act.Set(Act.COPY)
|
|
60001
60002
|
kcount = len(copySourcePerms)
|
|
60002
60003
|
k = 0
|
|
60003
|
-
for permissionId, permission in
|
|
60004
|
+
for permissionId, permission in copySourcePerms.items():
|
|
60004
60005
|
k += 1
|
|
60005
60006
|
kvList = permissionKVList(user, entityType, newFileTitle, permission)
|
|
60006
60007
|
permission.pop('id')
|
|
@@ -60073,7 +60074,7 @@ def _copyPermissions(drive, user, i, count, j, jcount,
|
|
|
60073
60074
|
Act.Set(Act.UPDATE)
|
|
60074
60075
|
kcount = len(updateTargetPerms)
|
|
60075
60076
|
k = 0
|
|
60076
|
-
for permissionId, permission in
|
|
60077
|
+
for permissionId, permission in updateTargetPerms.items():
|
|
60077
60078
|
k += 1
|
|
60078
60079
|
kvList = permissionKVList(user, entityType, newFileTitle, permission)
|
|
60079
60080
|
removeExpiration = permission['updates'].pop('removeExpiration', False)
|
|
@@ -61072,7 +61073,7 @@ def _updateMoveFilePermissions(drive, user, i, count,
|
|
|
61072
61073
|
Ind.Increment()
|
|
61073
61074
|
deleteSourcePerms = {}
|
|
61074
61075
|
addSourcePerms = {}
|
|
61075
|
-
for permissionId, permission in
|
|
61076
|
+
for permissionId, permission in sourcePerms.items():
|
|
61076
61077
|
kvList = permissionKVList(user, entityType, fileTitle, permission)
|
|
61077
61078
|
if isPermissionDeletable(kvList, permission):
|
|
61078
61079
|
pass
|
|
@@ -61083,7 +61084,7 @@ def _updateMoveFilePermissions(drive, user, i, count,
|
|
|
61083
61084
|
if kcount > 0:
|
|
61084
61085
|
Act.Set(Act.DELETE)
|
|
61085
61086
|
k = 0
|
|
61086
|
-
for permissionId, permission in
|
|
61087
|
+
for permissionId, permission in deleteSourcePerms.items():
|
|
61087
61088
|
k += 1
|
|
61088
61089
|
kvList = permissionKVList(user, entityType, fileTitle, permission)
|
|
61089
61090
|
try:
|
|
@@ -61110,7 +61111,7 @@ def _updateMoveFilePermissions(drive, user, i, count,
|
|
|
61110
61111
|
if kcount > 0:
|
|
61111
61112
|
Act.Set(Act.CREATE)
|
|
61112
61113
|
k = 0
|
|
61113
|
-
for permissionId, permission in
|
|
61114
|
+
for permissionId, permission in addSourcePerms.items():
|
|
61114
61115
|
k += 1
|
|
61115
61116
|
kvList = permissionKVList(user, entityType, fileTitle, permission)
|
|
61116
61117
|
permission.pop('id')
|
|
@@ -63429,7 +63430,7 @@ def transferOwnership(users):
|
|
|
63429
63430
|
else:
|
|
63430
63431
|
_identifyChildrenToTransfer(fileEntryInfo, user, i, count)
|
|
63431
63432
|
if csvPF:
|
|
63432
|
-
for xferFileId, fileInfo in
|
|
63433
|
+
for xferFileId, fileInfo in filesToTransfer.items():
|
|
63433
63434
|
row = {'OldOwner': user, 'NewOwner': newOwner, 'type': Ent.Singular(fileInfo['type']), 'id': xferFileId, 'name': fileInfo['name']}
|
|
63434
63435
|
if filepath:
|
|
63435
63436
|
addFilePathsToRow(drive, fileTree, fileTree[xferFileId]['info'], filePathInfo, csvPF, row)
|
|
@@ -63440,7 +63441,7 @@ def transferOwnership(users):
|
|
|
63440
63441
|
entityPerformActionNumItemsModifier([Ent.USER, user], kcount, Ent.DRIVE_FILE_OR_FOLDER, f'{Act.MODIFIER_TO} {Ent.Singular(Ent.USER)}: {newOwner}', i, count)
|
|
63441
63442
|
Ind.Increment()
|
|
63442
63443
|
k = 0
|
|
63443
|
-
for xferFileId, fileInfo in
|
|
63444
|
+
for xferFileId, fileInfo in filesToTransfer.items():
|
|
63444
63445
|
k += 1
|
|
63445
63446
|
entityType = fileInfo['type']
|
|
63446
63447
|
fileDesc = f'{fileInfo["name"]} ({xferFileId})'
|
|
@@ -63800,8 +63801,8 @@ def claimOwnership(users):
|
|
|
63800
63801
|
else:
|
|
63801
63802
|
_identifyChildrenToClaim(fileEntryInfo, user, i, count)
|
|
63802
63803
|
if csvPF:
|
|
63803
|
-
for oldOwner in filesToClaim:
|
|
63804
|
-
for claimFileId, fileInfo in
|
|
63804
|
+
for oldOwner, oldOwnerFilesToClaim in filesToClaim.items():
|
|
63805
|
+
for claimFileId, fileInfo in oldOwnerFilesToClaim.items():
|
|
63805
63806
|
row = {'NewOwner': user, 'OldOwner': oldOwner, 'type': Ent.Singular(fileInfo['type']), 'id': claimFileId, 'name': fileInfo['name']}
|
|
63806
63807
|
if filepath:
|
|
63807
63808
|
addFilePathsToRow(drive, fileTree, fileTree[claimFileId]['info'], filePathInfo, csvPF, row)
|
|
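
Illustrative note: the claimOwnership hunks above switch from iterating keys of filesToClaim to unpacking the nested {oldOwner: {fileId: fileInfo}} structure with .items() at both levels. A minimal sketch of that traversal, with invented owners and file IDs:

def list_files_to_claim(new_owner, files_to_claim):
    """Walk a nested {oldOwner: {fileId: fileInfo}} dict with .items() at both
    levels, yielding one row per file, as the claimOwnership loops above do."""
    for old_owner, owner_files in files_to_claim.items():
        for claim_file_id, file_info in owner_files.items():
            yield {'NewOwner': new_owner, 'OldOwner': old_owner,
                   'id': claim_file_id, 'name': file_info['name']}

files = {'alice@example.com': {'f1': {'name': 'Budget'}},
         'bob@example.com': {'f2': {'name': 'Notes'}, 'f3': {'name': 'Plan'}}}
for row in list_files_to_claim('admin@example.com', files):
    print(row)
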
@@ -63812,10 +63813,10 @@ def claimOwnership(users):
|
|
|
63812
63813
|
entityPerformActionNumItems([Ent.USER, user], kcount, Ent.USER, i, count)
|
|
63813
63814
|
Ind.Increment()
|
|
63814
63815
|
k = 0
|
|
63815
|
-
for oldOwner in filesToClaim:
|
|
63816
|
+
for oldOwner, oldOwnerFilesToClaim in filesToClaim.items():
|
|
63816
63817
|
k += 1
|
|
63817
63818
|
_, userDomain = splitEmailAddress(oldOwner)
|
|
63818
|
-
lcount = len(
|
|
63819
|
+
lcount = len(oldOwnerFilesToClaim)
|
|
63819
63820
|
if userDomain == GC.Values[GC.DOMAIN] or userDomain in subdomains:
|
|
63820
63821
|
_, sourceDrive = buildGAPIServiceObject(API.DRIVE3, oldOwner, k, kcount)
|
|
63821
63822
|
if not sourceDrive:
|
|
@@ -63824,7 +63825,7 @@ def claimOwnership(users):
|
|
|
63824
63825
|
f'{Act.MODIFIER_FROM} {Ent.Singular(Ent.USER)}: {oldOwner}', k, kcount)
|
|
63825
63826
|
Ind.Increment()
|
|
63826
63827
|
l = 0
|
|
63827
|
-
for xferFileId, fileInfo in
|
|
63828
|
+
for xferFileId, fileInfo in oldOwnerFilesToClaim.items():
|
|
63828
63829
|
l += 1
|
|
63829
63830
|
entityType = fileInfo['type']
|
|
63830
63831
|
fileDesc = f'{fileInfo["name"]} ({xferFileId})'
|
|
@@ -63937,7 +63938,7 @@ def claimOwnership(users):
|
|
|
63937
63938
|
f'{Act.MODIFIER_FROM} {Ent.Singular(Ent.USER)}: {oldOwner}', j, jcount)
|
|
63938
63939
|
Ind.Increment()
|
|
63939
63940
|
l = 0
|
|
63940
|
-
for xferFileId, fileInfo in
|
|
63941
|
+
for xferFileId, fileInfo in oldOwnerFilesToClaim.items():
|
|
63941
63942
|
l += 1
|
|
63942
63943
|
entityActionNotPerformedWarning([Ent.USER, user, fileInfo['type'], f'{fileInfo["name"]} ({xferFileId})'],
|
|
63943
63944
|
Msg.USER_IN_OTHER_DOMAIN.format(Ent.Singular(Ent.USER), oldOwner), l, lcount)
|
|
@@ -66275,7 +66276,7 @@ SHAREDDRIVE_API_GUI_ROLES_MAP = {
|
|
|
66275
66276
|
def _getSharedDriveRole(shareddrive):
|
|
66276
66277
|
if 'capabilities' not in shareddrive:
|
|
66277
66278
|
return None
|
|
66278
|
-
for role, capabilities in
|
|
66279
|
+
for role, capabilities in SHAREDDRIVE_ROLES_CAPABILITIES_MAP.items():
|
|
66279
66280
|
match = True
|
|
66280
66281
|
for capability in capabilities:
|
|
66281
66282
|
if capabilities[capability] != shareddrive['capabilities'].get(capability, ''):
|
|
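
Illustrative note: _getSharedDriveRole above derives a role by finding the entry in SHAREDDRIVE_ROLES_CAPABILITIES_MAP whose expected capability values all match the drive's reported capabilities. A simplified sketch of that lookup; the map below is invented and much smaller than GAM's real one:

def get_role_from_capabilities(capabilities, roles_capabilities_map):
    """Return the first role whose expected capability values all match the
    shared drive's capabilities, condensing the match/break loop above into all()."""
    for role, expected in roles_capabilities_map.items():
        if all(capabilities.get(name, '') == value for name, value in expected.items()):
            return role
    return None

roles_map = {'organizer': {'canManageMembers': True},
             'reader': {'canManageMembers': False, 'canEdit': False}}
print(get_role_from_capabilities({'canManageMembers': True, 'canEdit': True}, roles_map))  # organizer
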
@@ -67008,7 +67009,7 @@ def printSharedDriveOrganizers(users, useDomainAdminAccess=False):
|
|
|
67008
67009
|
delimiter = GC.Values[GC.CSV_OUTPUT_FIELD_DELIMITER]
|
|
67009
67010
|
roles = set(['organizer'])
|
|
67010
67011
|
includeTypes = set()
|
|
67011
|
-
showNoOrganizerDrives = SHOW_NO_PERMISSIONS_DRIVES_CHOICE_MAP['
|
|
67012
|
+
showNoOrganizerDrives = SHOW_NO_PERMISSIONS_DRIVES_CHOICE_MAP['true']
|
|
67012
67013
|
fieldsList = ['role', 'type', 'emailAddress']
|
|
67013
67014
|
cd = entityList = orgUnitId = query = matchPattern = None
|
|
67014
67015
|
domainList = set([(GC.Values[GC.DOMAIN] if GC.Values[GC.DOMAIN] else _getValueFromOAuth('hd'))])
|
|
@@ -69698,12 +69699,12 @@ def _printShowTokens(entityType, users):
|
|
|
69698
69699
|
performActionNumItems(jcount, Ent.ACCESS_TOKEN)
|
|
69699
69700
|
Ind.Increment()
|
|
69700
69701
|
j = 0
|
|
69701
|
-
for _, token in sorted(
|
|
69702
|
+
for _, token in sorted(aggregateTokensById.items()):
|
|
69702
69703
|
j += 1
|
|
69703
69704
|
_showToken(token, tokenTitle, aggregateUsersBy, j, jcount)
|
|
69704
69705
|
Ind.Decrement()
|
|
69705
69706
|
else:
|
|
69706
|
-
for _, token in sorted(
|
|
69707
|
+
for _, token in sorted(aggregateTokensById.items()):
|
|
69707
69708
|
_printToken(token)
|
|
69708
69709
|
elif aggregateUsersBy == 'displayText':
|
|
69709
69710
|
if not csvPF:
|
|
@@ -69711,24 +69712,24 @@ def _printShowTokens(entityType, users):
|
|
|
69711
69712
|
performActionNumItems(jcount, Ent.ACCESS_TOKEN)
|
|
69712
69713
|
Ind.Increment()
|
|
69713
69714
|
j = 0
|
|
69714
|
-
for _, tokenIds in sorted(
|
|
69715
|
+
for _, tokenIds in sorted(tokenNameIdMap.items()):
|
|
69715
69716
|
for tokcid in sorted(tokenIds):
|
|
69716
69717
|
j += 1
|
|
69717
69718
|
_showToken(aggregateTokensById[tokcid], tokenTitle, aggregateUsersBy, j, jcount)
|
|
69718
69719
|
Ind.Decrement()
|
|
69719
69720
|
else:
|
|
69720
|
-
for _, tokenIds in sorted(
|
|
69721
|
+
for _, tokenIds in sorted(tokenNameIdMap.items()):
|
|
69721
69722
|
for tokcid in sorted(tokenIds):
|
|
69722
69723
|
_printToken(aggregateTokensById[tokcid])
|
|
69723
69724
|
else: # aggregateUsersBy == 'user':
|
|
69724
69725
|
if not csvPF:
|
|
69725
69726
|
jcount = len(aggregateTokensById)
|
|
69726
69727
|
j = 0
|
|
69727
|
-
for user, count in sorted(
|
|
69728
|
+
for user, count in sorted(aggregateTokensById.items()):
|
|
69728
69729
|
j += 1
|
|
69729
69730
|
printEntityKVList([Ent.USER, user], [Ent.Plural(Ent.ACCESS_TOKEN), count], j, jcount)
|
|
69730
69731
|
else:
|
|
69731
|
-
for user, count in sorted(
|
|
69732
|
+
for user, count in sorted(aggregateTokensById.items()):
|
|
69732
69733
|
csvPF.WriteRow({'user': user, 'tokenCount': count})
|
|
69733
69734
|
if csvPF:
|
|
69734
69735
|
csvPF.writeCSVfile('OAuth Tokens')
|
|
@@ -70629,10 +70630,10 @@ def printShowLabels(users):
|
|
|
70629
70630
|
labelTree = _buildLabelTree(labels)
|
|
70630
70631
|
Ind.Increment()
|
|
70631
70632
|
if not showNested:
|
|
70632
|
-
for label, _ in sorted(
|
|
70633
|
+
for label, _ in sorted(labelTree.items(), key=lambda k: (k[1]['info']['type'], k[1]['info']['name'])):
|
|
70633
70634
|
_printFlatLabel(labelTree[label])
|
|
70634
70635
|
else:
|
|
70635
|
-
for label, _ in sorted(
|
|
70636
|
+
for label, _ in sorted(labelTree.items(), key=lambda k: (k[1]['info']['type'], k[1]['info']['name'])):
|
|
70636
70637
|
_printNestedLabel(labelTree[label])
|
|
70637
70638
|
Ind.Decrement()
|
|
70638
70639
|
else:
|
|
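
Illustrative note: the printShowLabels hunk above orders the label tree with sorted(labelTree.items(), key=...) on (type, name). A short sketch of that ordering, with invented label IDs:

def labels_in_display_order(label_tree):
    """Sort a {labelId: {'info': {...}}} tree by (type, name), the same key the
    sorted(labelTree.items(), key=...) calls above use, and return the nodes."""
    ordered = sorted(label_tree.items(),
                     key=lambda k: (k[1]['info']['type'], k[1]['info']['name']))
    return [node for _, node in ordered]

# System labels sort ahead of user labels, then alphabetically by name.
tree = {'Label_1': {'info': {'type': 'user', 'name': 'Receipts'}},
        'INBOX': {'info': {'type': 'system', 'name': 'INBOX'}},
        'Label_2': {'info': {'type': 'user', 'name': 'Archive'}}}
for node in labels_in_display_order(tree):
    print(node['info']['type'], node['info']['name'])
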
@@ -70810,7 +70811,7 @@ def _finalizeMessageSelectParameters(parameters, queryOrIdsRequired):
|
|
|
70810
70811
|
if parameters['labelGroupOpen']:
|
|
70811
70812
|
parameters['query'] += ')'
|
|
70812
70813
|
if parameters['queryTimes']:
|
|
70813
|
-
for queryTimeName, queryTimeValue in
|
|
70814
|
+
for queryTimeName, queryTimeValue in parameters['queryTimes'].items():
|
|
70814
70815
|
parameters['query'] = parameters['query'].replace(f'#{queryTimeName}#', queryTimeValue)
|
|
70815
70816
|
_mapMessageQueryDates(parameters)
|
|
70816
70817
|
elif queryOrIdsRequired and parameters['messageEntity'] is None and not parameters['labelIds']:
|
|
@@ -71696,7 +71697,7 @@ def _draftImportInsertMessage(users, operation):
|
|
|
71696
71697
|
message = MIMEText(tmpHTML, 'html', UTF8)
|
|
71697
71698
|
else:
|
|
71698
71699
|
message = MIMEText(tmpText, 'plain', UTF8)
|
|
71699
|
-
for header, value in
|
|
71700
|
+
for header, value in msgHeaders.items():
|
|
71700
71701
|
if substituteForUserInHeaders:
|
|
71701
71702
|
value = _substituteForUser(value, user, userName)
|
|
71702
71703
|
message[header] = Header()
|
|
@@ -71716,7 +71717,7 @@ def _draftImportInsertMessage(users, operation):
|
|
|
71716
71717
|
body = {'raw': base64.urlsafe_b64encode(bytes(tmpFile.read(), UTF8)).decode()}
|
|
71717
71718
|
tmpFile.close()
|
|
71718
71719
|
else:
|
|
71719
|
-
for header, value in
|
|
71720
|
+
for header, value in msgHeaders.items():
|
|
71720
71721
|
if substituteForUserInHeaders:
|
|
71721
71722
|
value = _substituteForUser(value, user, userName)
|
|
71722
71723
|
msgText = re.sub(fr'(?sm)\n{header}:.+?(?=[\r\n]+[a-zA-Z0-9-]+:)', f'\n{header}: {value}', msgText, 1)
|
|
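
Illustrative note: the _draftImportInsertMessage hunk above rewrites each header in the raw message text with a lookahead regex bounded by the next header line. A toy sketch of that rewrite; substitute_for_user and its #user# placeholder are stand-ins, not GAM's real _substituteForUser:

import re

def substitute_for_user(value, user):
    """Toy placeholder expansion standing in for GAM's _substituteForUser."""
    return value.replace('#user#', user)

def replace_header(msg_text, header, value):
    """Rewrite one header in a raw RFC 822 message using the same kind of
    lookahead regex as the hunk above: match up to the start of the next header."""
    return re.sub(fr'(?sm)\n{header}:.+?(?=[\r\n]+[a-zA-Z0-9-]+:)',
                  f'\n{header}: {value}', msg_text, count=1)

raw = 'From: old@example.com\nSubject: Hello\nTo: someone@example.com\n\nBody\n'
print(replace_header(raw, 'Subject',
                     substitute_for_user('Report for #user#', 'jane@example.com')))
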
@@ -72494,14 +72495,14 @@ def printShowMessagesThreads(users, entityType):
|
|
|
72494
72495
|
if onlyUser or positiveCountsOnly or labelMatchPattern:
|
|
72495
72496
|
for sender in senderLabelsMaps:
|
|
72496
72497
|
userLabelsMap = {}
|
|
72497
|
-
for labelId, label in
|
|
72498
|
+
for labelId, label in senderLabelsMaps[sender].items():
|
|
72498
72499
|
if (label['match'] and
|
|
72499
72500
|
(not onlyUser or label['type'] != LABEL_TYPE_SYSTEM) and
|
|
72500
72501
|
(not positiveCountsOnly or label['count'] > 0)):
|
|
72501
72502
|
userLabelsMap[labelId] = label
|
|
72502
72503
|
senderLabelsMaps[sender] = userLabelsMap
|
|
72503
72504
|
if not csvPF:
|
|
72504
|
-
for sender, labelsMap in sorted(
|
|
72505
|
+
for sender, labelsMap in sorted(senderLabelsMaps.items()):
|
|
72505
72506
|
jcount = len(labelsMap)
|
|
72506
72507
|
kvlist = [Ent.USER, user]
|
|
72507
72508
|
if senderMatchPattern:
|
|
@@ -72509,7 +72510,7 @@ def printShowMessagesThreads(users, entityType):
|
|
|
72509
72510
|
entityPerformActionNumItems(kvlist, jcount, Ent.LABEL, i, count)
|
|
72510
72511
|
Ind.Increment()
|
|
72511
72512
|
j = 0
|
|
72512
|
-
for label in sorted(
|
|
72513
|
+
for label in sorted(labelsMap.values(), key=lambda k: k['name']):
|
|
72513
72514
|
j += 1
|
|
72514
72515
|
if not show_size:
|
|
72515
72516
|
printEntityKVList([Ent.LABEL, label['name']], ['Count', label['count'], 'Type', label['type']], j, jcount)
|
|
@@ -72517,7 +72518,7 @@ def printShowMessagesThreads(users, entityType):
|
|
|
72517
72518
|
printEntityKVList([Ent.LABEL, label['name']], ['Count', label['count'], 'Size', label['size'], 'Type', label['type']], j, jcount)
|
|
72518
72519
|
Ind.Decrement()
|
|
72519
72520
|
else:
|
|
72520
|
-
for sender, labelsMap in sorted(
|
|
72521
|
+
for sender, labelsMap in sorted(senderLabelsMaps.items()):
|
|
72521
72522
|
row = {'User': user}
|
|
72522
72523
|
if senderMatchPattern:
|
|
72523
72524
|
row['Sender'] = sender
|
|
@@ -72526,7 +72527,7 @@ def printShowMessagesThreads(users, entityType):
|
|
|
72526
72527
|
label.pop('size', None)
|
|
72527
72528
|
if addCSVData:
|
|
72528
72529
|
row.update(addCSVData)
|
|
72529
|
-
csvPF.WriteRowTitles(flattenJSON({'Labels': sorted(
|
|
72530
|
+
csvPF.WriteRowTitles(flattenJSON({'Labels': sorted(labelsMap.values(), key=lambda k: k['name'])}, flattened=row))
|
|
72530
72531
|
elif not senderMatchPattern:
|
|
72531
72532
|
v = messageThreadCounts[parameters['listType']]
|
|
72532
72533
|
if not positiveCountsOnly or v > 0:
|
|
@@ -72542,11 +72543,11 @@ def printShowMessagesThreads(users, entityType):
|
|
|
72542
72543
|
else:
|
|
72543
72544
|
if not show_size:
|
|
72544
72545
|
if not csvPF:
|
|
72545
|
-
for k, v in sorted(
|
|
72546
|
+
for k, v in sorted(senderCounts.items()):
|
|
72546
72547
|
if not positiveCountsOnly or v['count'] > 0:
|
|
72547
72548
|
printEntityKVList([Ent.USER, user, Ent.SENDER, k], [parameters['listType'], v['count']], i, count)
|
|
72548
72549
|
else:
|
|
72549
|
-
for k, v in sorted(
|
|
72550
|
+
for k, v in sorted(senderCounts.items()):
|
|
72550
72551
|
if not positiveCountsOnly or v['count'] > 0:
|
|
72551
72552
|
row = {'User': user, 'Sender': k, parameters['listType']: v['count']}
|
|
72552
72553
|
if addCSVData:
|
|
@@ -72554,11 +72555,11 @@ def printShowMessagesThreads(users, entityType):
|
|
|
72554
72555
|
csvPF.WriteRow(row)
|
|
72555
72556
|
else:
|
|
72556
72557
|
if not csvPF:
|
|
72557
|
-
for k, v in sorted(
|
|
72558
|
+
for k, v in sorted(senderCounts.items()):
|
|
72558
72559
|
if not positiveCountsOnly or v['count'] > 0:
|
|
72559
72560
|
printEntityKVList([Ent.USER, user, Ent.SENDER, k], [parameters['listType'], v['count'], 'size', v['size']], i, count)
|
|
72560
72561
|
else:
|
|
72561
|
-
for k, v in sorted(
|
|
72562
|
+
for k, v in sorted(senderCounts.items()):
|
|
72562
72563
|
if not positiveCountsOnly or v['count'] > 0:
|
|
72563
72564
|
row = {'User': user, 'Sender': k, parameters['listType']: v['count'], 'size': v['size']}
|
|
72564
72565
|
if addCSVData:
|
|
@@ -73037,7 +73038,7 @@ def createFilter(users):
|
|
|
73037
73038
|
try:
|
|
73038
73039
|
lcount = len(addLabelIndicies)
|
|
73039
73040
|
l = 0
|
|
73040
|
-
for addLabelName, addLabelData in
|
|
73041
|
+
for addLabelName, addLabelData in addLabelIndicies.items():
|
|
73041
73042
|
l += 1
|
|
73042
73043
|
retries = 3
|
|
73043
73044
|
for _ in range(1, retries+1):
|
|
@@ -73519,7 +73520,7 @@ def printShowFormResponses(users):
|
|
|
73519
73520
|
else:
|
|
73520
73521
|
FJQC.GetFormatJSONQuoteChar(myarg, True)
|
|
73521
73522
|
if filterTimes and frfilter is not None:
|
|
73522
|
-
for filterTimeName, filterTimeValue in
|
|
73523
|
+
for filterTimeName, filterTimeValue in filterTimes.items():
|
|
73523
73524
|
frfilter = frfilter.replace(f'#{filterTimeName}#', filterTimeValue)
|
|
73524
73525
|
if csvPF:
|
|
73525
73526
|
if countsOnly:
|
|
@@ -78753,7 +78754,7 @@ def showAPICallsRetryData():
|
|
|
78753
78754
|
Ind.Reset()
|
|
78754
78755
|
writeStderr(Msg.API_CALLS_RETRY_DATA)
|
|
78755
78756
|
Ind.Increment()
|
|
78756
|
-
for k, v in sorted(
|
|
78757
|
+
for k, v in sorted(GM.Globals[GM.API_CALLS_RETRY_DATA].items()):
|
|
78757
78758
|
m, s = divmod(int(v[1]), 60)
|
|
78758
78759
|
h, m = divmod(m, 60)
|
|
78759
78760
|
writeStderr(formatKeyValueList(Ind.Spaces(), [k, f'{v[0]}/{h}:{m:02d}:{s:02d}'], '\n'))
|
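
Illustrative note: the showAPICallsRetryData hunk above formats each retry entry as calls plus an h:mm:ss wait time via two divmod calls. A tiny sketch of that arithmetic, with invented numbers:

def format_retry_wait(total_calls, wait_seconds):
    """Format a retry-data entry as 'calls/h:mm:ss', using the same divmod
    arithmetic as showAPICallsRetryData above."""
    m, s = divmod(int(wait_seconds), 60)
    h, m = divmod(m, 60)
    return f'{total_calls}/{h}:{m:02d}:{s:02d}'

# 7 retried calls that waited a total of 3725 seconds.
print(format_retry_wait(7, 3725))  # 7/1:02:05
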