illumio-pylo 0.2.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. illumio_pylo/API/APIConnector.py +1308 -0
  2. illumio_pylo/API/AuditLog.py +42 -0
  3. illumio_pylo/API/ClusterHealth.py +136 -0
  4. illumio_pylo/API/CredentialsManager.py +286 -0
  5. illumio_pylo/API/Explorer.py +1077 -0
  6. illumio_pylo/API/JsonPayloadTypes.py +240 -0
  7. illumio_pylo/API/RuleSearchQuery.py +128 -0
  8. illumio_pylo/API/__init__.py +0 -0
  9. illumio_pylo/AgentStore.py +139 -0
  10. illumio_pylo/Exception.py +44 -0
  11. illumio_pylo/Helpers/__init__.py +3 -0
  12. illumio_pylo/Helpers/exports.py +508 -0
  13. illumio_pylo/Helpers/functions.py +166 -0
  14. illumio_pylo/IPList.py +135 -0
  15. illumio_pylo/IPMap.py +285 -0
  16. illumio_pylo/Label.py +25 -0
  17. illumio_pylo/LabelCommon.py +48 -0
  18. illumio_pylo/LabelGroup.py +68 -0
  19. illumio_pylo/LabelStore.py +403 -0
  20. illumio_pylo/LabeledObject.py +25 -0
  21. illumio_pylo/Organization.py +258 -0
  22. illumio_pylo/Query.py +331 -0
  23. illumio_pylo/ReferenceTracker.py +41 -0
  24. illumio_pylo/Rule.py +671 -0
  25. illumio_pylo/Ruleset.py +306 -0
  26. illumio_pylo/RulesetStore.py +101 -0
  27. illumio_pylo/SecurityPrincipal.py +62 -0
  28. illumio_pylo/Service.py +256 -0
  29. illumio_pylo/SoftwareVersion.py +125 -0
  30. illumio_pylo/VirtualService.py +17 -0
  31. illumio_pylo/VirtualServiceStore.py +75 -0
  32. illumio_pylo/Workload.py +506 -0
  33. illumio_pylo/WorkloadStore.py +289 -0
  34. illumio_pylo/__init__.py +82 -0
  35. illumio_pylo/cli/NativeParsers.py +96 -0
  36. illumio_pylo/cli/__init__.py +134 -0
  37. illumio_pylo/cli/__main__.py +10 -0
  38. illumio_pylo/cli/commands/__init__.py +32 -0
  39. illumio_pylo/cli/commands/credential_manager.py +168 -0
  40. illumio_pylo/cli/commands/iplist_import_from_file.py +185 -0
  41. illumio_pylo/cli/commands/misc.py +7 -0
  42. illumio_pylo/cli/commands/ruleset_export.py +129 -0
  43. illumio_pylo/cli/commands/update_pce_objects_cache.py +44 -0
  44. illumio_pylo/cli/commands/ven_duplicate_remover.py +366 -0
  45. illumio_pylo/cli/commands/ven_idle_to_visibility.py +287 -0
  46. illumio_pylo/cli/commands/ven_upgrader.py +226 -0
  47. illumio_pylo/cli/commands/workload_export.py +251 -0
  48. illumio_pylo/cli/commands/workload_import.py +423 -0
  49. illumio_pylo/cli/commands/workload_relabeler.py +510 -0
  50. illumio_pylo/cli/commands/workload_reset_names_to_null.py +83 -0
  51. illumio_pylo/cli/commands/workload_used_in_rule_finder.py +80 -0
  52. illumio_pylo/docs/Doxygen +1757 -0
  53. illumio_pylo/tmp.py +104 -0
  54. illumio_pylo/utilities/__init__.py +0 -0
  55. illumio_pylo/utilities/cli.py +10 -0
  56. illumio_pylo/utilities/credentials.example.json +20 -0
  57. illumio_pylo/utilities/explorer_report_exporter.py +86 -0
  58. illumio_pylo/utilities/health_monitoring.py +102 -0
  59. illumio_pylo/utilities/iplist_analyzer.py +148 -0
  60. illumio_pylo/utilities/iplists_stats_duplicates_unused_finder.py +75 -0
  61. illumio_pylo/utilities/resources/iplists-import-example.csv +3 -0
  62. illumio_pylo/utilities/resources/iplists-import-example.xlsx +0 -0
  63. illumio_pylo/utilities/resources/workload-exporter-filter-example.csv +3 -0
  64. illumio_pylo/utilities/resources/workloads-import-example.csv +2 -0
  65. illumio_pylo/utilities/resources/workloads-import-example.xlsx +0 -0
  66. illumio_pylo/utilities/ven_compatibility_report_export.py +240 -0
  67. illumio_pylo/utilities/ven_idle_to_illumination.py +344 -0
  68. illumio_pylo/utilities/ven_reassign_pce.py +183 -0
  69. illumio_pylo-0.2.5.dist-info/LICENSE +176 -0
  70. illumio_pylo-0.2.5.dist-info/METADATA +197 -0
  71. illumio_pylo-0.2.5.dist-info/RECORD +73 -0
  72. illumio_pylo-0.2.5.dist-info/WHEEL +5 -0
  73. illumio_pylo-0.2.5.dist-info/top_level.txt +1 -0
illumio_pylo/cli/commands/workload_import.py
@@ -0,0 +1,423 @@
+ from typing import Dict, List, Any
+ from dataclasses import dataclass
+ import sys
+ import argparse
+ import math
+ import illumio_pylo as pylo
+ from illumio_pylo.API.JsonPayloadTypes import WorkloadObjectCreateJsonStructure
+ from .misc import make_filename_with_timestamp
+ from . import Command
+
+ command_name = 'workload-import'
+ objects_load_filter = ['workloads', 'labels']
+
+
+ # CollisionItem records where a given name/hostname or IP address was first seen
+ # (an existing PCE Workload or a CSV/Excel row) so later duplicates can be traced back to their origin
+ @dataclass
+ class CollisionItem:
+     managed: bool
+     from_pce: bool = False
+     workload_object: pylo.Workload = None
+     csv_object: Dict[str, Any] = None
+
+
+ def fill_parser(parser: argparse.ArgumentParser):
+     parser.add_argument('--input-file', '-i', type=str, required=True,
+                         help='CSV or Excel input filename')
+     parser.add_argument('--input-file-delimiter', type=str, required=False, default=',',
+                         help='CSV field delimiter')
+
+     parser.add_argument('--input-filter-file', type=str, required=False, default=None,
+                         help='CSV/Excel file used to keep only the lines of interest from the input file')
+
+     parser.add_argument('--ignore-if-managed-workload-exists', action='store_true',
+                         help='If a Managed Workload with the same name already exists, ignore the CSV entry')
+     # parser.add_argument('--ignore-label-case-collisions', type=bool, nargs='?', required=False, default=False, const=True,
+     #                     help='Allow Workloads to be created with Labels sharing the same name but a different case (the Illumio PCE allows it but it is definitely bad practice!)')
+     parser.add_argument('--ignore-all-sorts-collisions', action='store_true',
+                         help='If name/hostname/IP collisions are found, ignore the affected CSV/Excel entries')
+
+     parser.add_argument('--ignore-empty-ip-entries', action='store_true',
+                         help='If an entry has no IP address, it will be ignored')
+
+     parser.add_argument('--batch-size', type=int, required=False, default=500,
+                         help='Number of Workloads to create per API call')
+
+     parser.add_argument('--confirm', action='store_true',
+                         help='No change will be pushed to the PCE unless you use this option to confirm the changes after reviewing them')
+
+
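+ # Hypothetical invocation sketch (the exact CLI entry point and global options may differ;
+ # only the command name and the flags defined above are taken from this file):
+ #   python -m illumio_pylo.cli workload-import --input-file ./workloads.csv --confirm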
+ def __main(args, org: pylo.Organization, **kwargs):
+     input_file = args['input_file']
+     input_filter_file = args['input_filter_file']
+     input_file_delimiter = args['input_file_delimiter']
+     ignore_if_managed_workload_exists = args['ignore_if_managed_workload_exists']
+     ignore_all_sorts_collisions = args['ignore_all_sorts_collisions']
+     settings_ignore_empty_ip_entries: bool = args['ignore_empty_ip_entries']
+     # ignore_label_case_collisions = args['ignore_label_case_collisions']
+     batch_size = args['batch_size']
+     settings_confirmed_change: bool = args['confirm']
+
+     output_file_prefix = make_filename_with_timestamp('import-umw-results_')
+     output_file_csv = output_file_prefix + '.csv'
+     output_file_excel = output_file_prefix + '.xlsx'
+
+     csv_expected_fields: List[Dict] = [
+         {'name': 'name', 'optional': True, 'default': ''},
+         {'name': 'hostname', 'optional': False},
+         {'name': 'ip', 'optional': False},
+         {'name': 'description', 'optional': True, 'default': ''}
+     ]
+
+     # a 'label_<type>' column is expected for each label type/dimension, and each one is optional
+     for label_type in org.LabelStore.label_types:
+         csv_expected_fields.append({'name': f"label_{label_type}", 'optional': True})
+
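+     # Hypothetical input layout, assuming the default role/app/env/loc label dimensions
+     # (one 'label_<type>' column per dimension defined in the PCE):
+     #   name,hostname,ip,description,label_role,label_app,label_env,label_loc
+     #   ,server1.acme.local,"192.168.10.1,10.0.0.1",imported UMW,DB,ERP,PROD,DC1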
+     csv_filter_fields = [
+         {'name': 'ip', 'optional': False},
+     ]
+
+     csv_created_fields = csv_expected_fields.copy()
+     csv_created_fields.append({'name': 'href'})
+     csv_created_fields.append({'name': '**not_created_reason**'})
+
+     pylo.file_clean(output_file_csv)
+     pylo.file_clean(output_file_excel)
+
+     print(" * Loading CSV input file '{}'...".format(input_file), flush=True, end='')
+     csv_data = pylo.CsvExcelToObject(input_file, expected_headers=csv_expected_fields, csv_delimiter=input_file_delimiter)
+     print('OK')
+     print(" - CSV has {} columns and {} lines (headers don't count)".format(csv_data.count_columns(), csv_data.count_lines()))
+     # print(pylo.nice_json(csv_data._objects))
+
+     # <editor-fold desc="Name/Hostname collision detection">
+     print(" * Checking for name/hostname collisions:", flush=True)
+     name_cache: Dict[str, CollisionItem] = {}
+     for workload in org.WorkloadStore.itemsByHRef.values():
+         lower_name = None
+         if workload.forced_name is not None and len(workload.forced_name) > 0:
+             lower_name = workload.forced_name.lower()
+             if lower_name not in name_cache:
+                 name_cache[lower_name] = CollisionItem(from_pce=True, workload_object=workload, managed=not workload.unmanaged)
+             else:
+                 print(" - Warning: duplicate found in the PCE for hostname/name: {}".format(workload.get_name()))
+         if workload.hostname is not None and len(workload.hostname) > 0:
+             lower_hostname = workload.hostname.lower()
+             if lower_name != lower_hostname:
+                 if lower_hostname not in name_cache:
+                     name_cache[lower_hostname] = CollisionItem(from_pce=True, workload_object=workload, managed=not workload.unmanaged)
+                 else:
+                     print(" - Warning: duplicate found in the PCE for hostname/name: {}".format(workload.hostname))
+
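+     # now check each CSV/Excel row against the PCE cache and against previously seen rows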
+     for csv_object in csv_data.objects():
+         if '**not_created_reason**' in csv_object:
+             continue
+         lower_name = None
+         if csv_object['name'] is not None and len(csv_object['name']) > 0:
+             lower_name = csv_object['name'].lower()
+             if lower_name not in name_cache:
+                 name_cache[lower_name] = CollisionItem(from_pce=False, csv_object=csv_object, managed=False)
+             else:
+                 if not name_cache[lower_name].from_pce:
+                     raise pylo.PyloEx('CSV contains workloads with duplicate name/hostname: {}'.format(lower_name))
+                 else:
+                     csv_object['**not_created_reason**'] = 'Found duplicated name/hostname in PCE'
+                     if ignore_all_sorts_collisions or ignore_if_managed_workload_exists:
+                         pass
+                     elif not name_cache[lower_name].managed:
+                         raise pylo.PyloEx("PCE contains workloads with duplicate name/hostname from CSV: '{}' at line #{}".format(lower_name, csv_object['*line*']))
+                     print(" - WARNING: CSV has an entry for workload name '{}' at line #{} but it already exists in the PCE. It will be ignored.".format(lower_name, csv_object['*line*']))
+
+         if csv_object['hostname'] is not None and len(csv_object['hostname']) > 0:
+             lower_hostname = csv_object['hostname'].lower()
+             if lower_name != lower_hostname:
+                 if lower_hostname not in name_cache:
+                     name_cache[lower_hostname] = CollisionItem(from_pce=False, csv_object=csv_object, managed=False)
+                 else:
+                     if not name_cache[lower_hostname].from_pce:
+                         raise pylo.PyloEx('CSV contains workloads with duplicate name/hostname: {}'.format(lower_hostname))
+                     else:
+                         csv_object['**not_created_reason**'] = 'Found duplicated name/hostname in PCE'
+                         if ignore_all_sorts_collisions or ignore_if_managed_workload_exists:
+                             pass
+                         elif not name_cache[lower_hostname].managed:
+                             raise pylo.PyloEx("PCE contains workloads with duplicate name/hostname from CSV: '{}' at line #{}".format(lower_hostname, csv_object['*line*']))
+                         print(" - WARNING: CSV has an entry for workload hostname '{}' at line #{} but it already exists in the PCE. It will be ignored.".format(lower_hostname, csv_object['*line*']))
+
+     del name_cache
+     print(" * DONE")
+     # </editor-fold>
+
+     # <editor-fold desc="IP Collision detection">
+     print(" * Checking for IP address collisions:")
+     ip_cache: Dict[str, CollisionItem] = {}
+     count_duplicate_ip_addresses_in_csv = 0
+     for workload in org.WorkloadStore.itemsByHRef.values():
+         for interface in workload.interfaces:
+             if interface.ip not in ip_cache:
+                 ip_cache[interface.ip] = CollisionItem(from_pce=True, workload_object=workload, managed=not workload.unmanaged)
+             else:
+                 print(" - Warning: duplicate IPs found in the PCE between 2 workloads ({} and {}) for IP: {}".format(
+                     workload.get_name(), ip_cache[interface.ip].workload_object.get_name(), interface.ip))
+
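+     # validate each row's (possibly comma-separated) IP list and flag duplicates against the PCE and earlier rows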
+     for csv_object in csv_data.objects():
+         if '**not_created_reason**' in csv_object:
+             continue
+
+         ips = csv_object['ip']
+         if ips is None:
+             ips = ""
+         else:
+             ips = ips.strip(" \r\n")
+
+         if len(ips) == 0:
+             if not settings_ignore_empty_ip_entries:
+                 pylo.log.error("CSV/Excel at line #{} contains an empty IP address".format(csv_object['*line*']))
+                 sys.exit(1)
+             else:
+                 csv_object['**not_created_reason**'] = "Empty IP address provided"
+                 continue
+
+         ips = ips.split(',')
+
+         csv_object['**ip_array**'] = []
+
+         for ip in ips:
+             ip = ip.strip(" \r\n")
+
+             if not pylo.is_valid_ipv4(ip) and not pylo.is_valid_ipv6(ip):
+                 pylo.log.error("CSV/Excel at line #{} contains invalid IP addresses: '{}'".format(csv_object['*line*'], csv_object['ip']))
+                 sys.exit(1)
+
+             csv_object['**ip_array**'].append(ip)
+
+             if ip not in ip_cache:
+                 ip_cache[ip] = CollisionItem(from_pce=False, csv_object=csv_object, managed=False)
+             else:
+                 count_duplicate_ip_addresses_in_csv += 1
+                 csv_object['**not_created_reason**'] = "Duplicate IP address {} found in the PCE".format(ip)
+                 print(" - IP address {} found in both the PCE and CSV (line #{}, name={})".format(
+                     ip, csv_object['*line*'], csv_object['name'])
+                 )
+                 if not ignore_all_sorts_collisions:
+                     print("Duplicate IP address {} found in the PCE and CSV/Excel at line #{}. "
+                           "(see the --ignore-* options to bypass this if you know what you are doing)"
+                           .format(ip, csv_object['*line*']))
+                     sys.exit(1)
+                 break
+
+     print(" - Found {} colliding IP addresses from CSV/Excel, they won't be imported".format(count_duplicate_ip_addresses_in_csv))
+
+     del ip_cache
+     print(" * DONE")
+     # </editor-fold>
+
+     # <editor-fold desc="Optional filters parsing">
+     print(" * Filtering CSV/Excel based on optional filters...", flush=True)
+
+     if input_filter_file is None:
+         print(" - No filter given (see --help)")
+     else:
+         print(" - loading Excel/CSV file '{}'... ".format(input_filter_file), end='', flush=True)
+         filter_csv_data = pylo.CsvExcelToObject(input_filter_file, csv_filter_fields, strict_headers=True)
+         print("OK")
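+         # a CSV/Excel row is kept only if at least one of its IP addresses appears in the filter file;
+         # everything else is marked as not-created so it still shows up in the report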
+
+         # first pass on the filter data: if an IP address is specified it must be a valid one,
+         # otherwise bail out early rather than silently matching nothing
+         for filter_from_csv_file in filter_csv_data.objects():
+             ip = filter_from_csv_file.get('ip')
+             if ip is None:
+                 continue
+             if not pylo.is_valid_ipv4(ip) and not pylo.is_valid_ipv6(ip):
+                 pylo.log.error("CSV/Excel FILTER file has invalid IP {} at line #{}".format(ip, filter_from_csv_file['*line*']))
+                 sys.exit(1)
+
+         # actually apply the filters
+         for csv_object in csv_data.objects():
+             if '**not_created_reason**' in csv_object:
+                 continue
+
+             match_filter = False
+             for filter_from_csv_file in filter_csv_data.objects():
+
+                 # 'ip' field based filter
+                 ip_filter = filter_from_csv_file.get('ip')
+                 if ip_filter is not None:
+                     for ip in csv_object['**ip_array**']:
+                         if ip_filter == ip:
+                             match_filter = True
+                             break
+
+                 if match_filter:
+                     break
+
+             if not match_filter:
+                 # for reporting purposes show why it was not created
+                 csv_object['**not_created_reason**'] = "No match in input filter file"
+
+     print(" * DONE")
+     # </editor-fold>
+
+     # <editor-fold desc="Label collision detection and listing of labels to be created">
+     print(" * Checking for Labels case collisions and missing ones to be created:")
+     name_cache: Dict[str, Any] = {}
+     for label in org.LabelStore.get_labels():
+         if label.name is not None:
+             lower_name = label.name.lower()
+             if lower_name not in name_cache:
+                 name_cache[lower_name] = {'pce': True, 'realcase': label.name, 'type': label.type}
+             else:
+                 print(" - Warning: duplicate found in the PCE for Label: {}".format(label.name))
+
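+     # cache entry format: {'pce': True or 'csv': True, 'realcase': <name as typed>, 'type': <label type>}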
+     for csv_object in csv_data.objects():
+         if '**not_created_reason**' in csv_object:
+             continue
+
+         # for each label type/dimension, check which labels are requested and whether they already exist
+         for label_type in org.LabelStore.label_types:
+             requested_label_name = csv_object.get(f"label_{label_type}")
+             if requested_label_name is None or len(requested_label_name) == 0:
+                 continue
+             requested_label_name_lower = requested_label_name.lower()
+             if requested_label_name_lower not in name_cache:
+                 name_cache[requested_label_name_lower] = {'csv': True, 'realcase': requested_label_name, 'type': label_type}
+             # type collision
+             elif name_cache[requested_label_name_lower]['type'] != label_type:
+                 if 'csv' in name_cache[requested_label_name_lower]:
+                     raise pylo.PyloEx("Found duplicate label with name '{}' but different type within the CSV".format(requested_label_name))
+                 else:
+                     raise pylo.PyloEx("Found duplicate label with name '{}' but different type between CSV and PCE".format(requested_label_name))
+             # case collision
+             elif name_cache[requested_label_name_lower]['realcase'] != requested_label_name:
+                 if 'csv' in name_cache[requested_label_name_lower]:
+                     raise pylo.PyloEx("Found duplicate label with name '{}' but different case within the CSV".format(requested_label_name))
+                 else:
+                     raise pylo.PyloEx("Found duplicate label with name '{}' but different case between CSV and PCE".format(requested_label_name))
+
+     labels_to_be_created: List[Dict] = []
+     for label_entry in name_cache.values():
+         if 'csv' in label_entry:
+             labels_to_be_created.append({'name': label_entry['realcase'], 'type': label_entry['type']})
+
+     del name_cache
+     print(" * DONE")
+     # </editor-fold>
+
+     # <editor-fold desc="Missing Labels creation">
+     if len(labels_to_be_created) > 0:
+         print(" * {} Labels need to be created before Workloads can be imported, listing:".format(len(labels_to_be_created)))
+         for label_to_create in labels_to_be_created:
+             print(" - {} type {}".format(label_to_create['name'], label_to_create['type']))
+
+         print(" ** Proceed and create all the {} Labels? (yes/no): ".format(len(labels_to_be_created)), flush=True, end='')
+         while True:
+             keyboard_input = input()
+             keyboard_input = keyboard_input.lower()
+             if keyboard_input == 'yes' or keyboard_input == 'y':
+                 break
+             if keyboard_input == 'no' or keyboard_input == 'n':
+                 sys.exit(0)
+         for label_to_create in labels_to_be_created:
+             print(" - Pushing '{}' with type '{}' to the PCE... ".format(label_to_create['name'], label_to_create['type']), end='', flush=True)
+             org.LabelStore.api_create_label(label_to_create['name'], label_to_create['type'])
+             print("OK")
+     # </editor-fold>
+
+     # Listing objects to be created (filtering out inconsistent ones)
+     csv_objects_to_create = []
+     ignored_objects_count = 0
+     for csv_object in csv_data.objects():
+         if '**not_created_reason**' not in csv_object:
+             csv_objects_to_create.append(csv_object)
+         else:
+             ignored_objects_count += 1
+
+     # <editor-fold desc="JSON Payloads generation">
+     print(' * Preparing Workloads JSON payloads...')
+     workloads_json_data = []
+     for data in csv_objects_to_create:
+         new_workload: WorkloadObjectCreateJsonStructure = {}
+         workloads_json_data.append(new_workload)
+
+         if len(data['name']) > 0:
+             new_workload['name'] = data['name']
+
+         if len(data['hostname']) < 1:
+             raise pylo.PyloEx('Workload at line #{} is missing a hostname in CSV'.format(data['*line*']))
+         else:
+             new_workload['hostname'] = data['hostname']
+
+         new_workload['labels'] = []
+
+         # attach the href of each requested label, one per label type/dimension
+         for label_type in org.LabelStore.label_types:
+             requested_label_name = data.get(f"label_{label_type}")
+             if requested_label_name is None or len(requested_label_name) == 0:
+                 continue
+             found_label = org.LabelStore.find_label_by_name_and_type(requested_label_name, label_type)
+             if found_label is not None:
+                 new_workload['labels'].append({'href': found_label.href})
+
+         if len(data['description']) > 0:
+             new_workload['description'] = data['description']
+
+         if len(data['**ip_array**']) < 1:
+             pylo.log.error('CSV/Excel workload at line #{} has no valid IP address defined'.format(data['*line*']))
+             sys.exit(1)
+
+         new_workload['public_ip'] = data['**ip_array**'][0]
+         new_workload['interfaces'] = []
+         for ip in data['**ip_array**']:
+             new_workload['interfaces'].append({"name": "eth0", "address": ip})
+
+     print(" * DONE")
+     # </editor-fold>
+
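+     # A generated payload entry typically looks like this (hypothetical values):
+     #   {'name': 'server1', 'hostname': 'server1.acme.local', 'labels': [{'href': '/orgs/1/labels/123'}],
+     #    'description': 'imported UMW', 'public_ip': '192.168.10.1',
+     #    'interfaces': [{'name': 'eth0', 'address': '192.168.10.1'}]}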
+     # <editor-fold desc="Unmanaged Workloads PUSH to API">
+ print(" * Creating {} Unmanaged Workloads in batches of {}".format(len(workloads_json_data), batch_size))
386
+     batch_cursor = 0
+     total_created_count = 0
+     total_failed_count = 0
+     while batch_cursor < len(workloads_json_data):
+         print(" - batch #{} of {}".format(batch_cursor // batch_size + 1, math.ceil(len(workloads_json_data) / batch_size)))
+         batch_json_data = workloads_json_data[batch_cursor:batch_cursor + batch_size]
+         results = org.connector.objects_workload_create_bulk_unmanaged(batch_json_data)
+         created_count = 0
+         failed_count = 0
+
+         # each result entry carries a 'status' plus either an 'href' (created) or a 'message' (failure reason)
+         for i in range(0, len(batch_json_data)):
+             result = results[i]
+             if result['status'] != 'created':
+                 csv_objects_to_create[i + batch_cursor]['**not_created_reason**'] = result['message']
+                 failed_count += 1
+                 total_failed_count += 1
+             else:
+                 csv_objects_to_create[i + batch_cursor]['href'] = result['href']
+                 created_count += 1
+                 total_created_count += 1
+
+         print(" - {} created with success, {} failures (read report to get reasons)".format(created_count, failed_count))
+         # save intermediate reports after each batch so progress isn't lost if a later batch fails
+         csv_data.save_to_csv(output_file_csv, csv_created_fields)
+         csv_data.save_to_excel(output_file_excel, csv_created_fields)
+
+         batch_cursor += batch_size
+     # </editor-fold>
+
+     csv_data.save_to_csv(output_file_csv, csv_created_fields)
+     csv_data.save_to_excel(output_file_excel, csv_created_fields)
+
+     print(" * DONE - {} created with success, {} failures and {} ignored. A report was created in {} and {}".format(
+         total_created_count, total_failed_count, ignored_objects_count, output_file_csv, output_file_excel))
+
+
+ command_object = Command(command_name, __main, fill_parser, objects_load_filter)