illumio-pylo 0.3.11__py3-none-any.whl → 0.3.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- illumio_pylo/API/APIConnector.py +138 -106
- illumio_pylo/API/CredentialsManager.py +168 -3
- illumio_pylo/API/Explorer.py +619 -14
- illumio_pylo/API/JsonPayloadTypes.py +64 -4
- illumio_pylo/FilterQuery.py +892 -0
- illumio_pylo/Helpers/exports.py +1 -1
- illumio_pylo/LabelCommon.py +13 -3
- illumio_pylo/LabelDimension.py +109 -0
- illumio_pylo/LabelStore.py +97 -38
- illumio_pylo/WorkloadStore.py +58 -0
- illumio_pylo/__init__.py +9 -3
- illumio_pylo/cli/__init__.py +5 -2
- illumio_pylo/cli/commands/__init__.py +1 -0
- illumio_pylo/cli/commands/credential_manager.py +555 -4
- illumio_pylo/cli/commands/label_delete_unused.py +0 -3
- illumio_pylo/cli/commands/traffic_export.py +358 -0
- illumio_pylo/cli/commands/ui/credential_manager_ui/app.js +638 -0
- illumio_pylo/cli/commands/ui/credential_manager_ui/index.html +217 -0
- illumio_pylo/cli/commands/ui/credential_manager_ui/styles.css +581 -0
- illumio_pylo/cli/commands/update_pce_objects_cache.py +1 -2
- illumio_pylo/cli/commands/ven_duplicate_remover.py +79 -59
- illumio_pylo/cli/commands/workload_export.py +29 -0
- illumio_pylo/utilities/cli.py +4 -1
- illumio_pylo/utilities/health_monitoring.py +5 -1
- {illumio_pylo-0.3.11.dist-info → illumio_pylo-0.3.13.dist-info}/METADATA +2 -1
- {illumio_pylo-0.3.11.dist-info → illumio_pylo-0.3.13.dist-info}/RECORD +29 -24
- {illumio_pylo-0.3.11.dist-info → illumio_pylo-0.3.13.dist-info}/WHEEL +1 -1
- illumio_pylo/Query.py +0 -331
- {illumio_pylo-0.3.11.dist-info → illumio_pylo-0.3.13.dist-info}/licenses/LICENSE +0 -0
- {illumio_pylo-0.3.11.dist-info → illumio_pylo-0.3.13.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,358 @@
|
|
|
1
|
+
import argparse
|
|
2
|
+
from datetime import datetime
|
|
3
|
+
import os
|
|
4
|
+
from typing import Dict, List, Literal
|
|
5
|
+
from zoneinfo import ZoneInfo
|
|
6
|
+
|
|
7
|
+
import illumio_pylo as pylo
|
|
8
|
+
from illumio_pylo import ArraysToExcel, ExcelHeader, ExplorerResultV2
|
|
9
|
+
from .utils.misc import make_filename_with_timestamp
|
|
10
|
+
from . import Command
|
|
11
|
+
|
|
12
|
+
# Name under which this command is registered with the pylo CLI.
command_name = 'traffic-export'
# Only these PCE object types are loaded before the command runs; the command needs
# labels/labelgroups/iplists/services to resolve filters and decorate records.
objects_load_filter: List[pylo.ObjectTypes] = ['labels', 'labelgroups', 'iplists', 'services']
|
|
14
|
+
|
|
15
|
+
# Static column definitions for the traffic export report.
# Label columns are not listed here: they are derived at runtime from the PCE's
# label types (or collapsed into src_labels/dst_labels when consolidation is on).
BASE_COLUMNS = ['src_ip', 'src_iplist', 'src_workload']
DST_BASE_COLUMNS = ['dst_ip', 'dst_iplist', 'dst_workload']
SERVICE_COLUMNS = ['protocol', 'port']
POLICY_COLUMNS = ['policy_decision', 'draft_policy_decision']
TIME_COLUMNS = ['first_detected', 'last_detected']


def _generate_omit_columns_help() -> str:
    """Build the help text for --omit-columns, listing every static base column."""
    every_base_column = (BASE_COLUMNS + DST_BASE_COLUMNS + SERVICE_COLUMNS
                         + POLICY_COLUMNS + TIME_COLUMNS)
    base_cols = ', '.join(every_base_column)
    help_text = (
        f'Column names to omit from the export (e.g., protocol, port). '
        f'Base columns: {base_cols}. '
        f'Label columns are added dynamically based on label types (e.g., src_app, dst_env, etc.). '
        f'When --consolidate-labels is used, src_labels and dst_labels are available instead of individual label columns.'
    )
    return help_text
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def fill_parser(parser: argparse.ArgumentParser):
    """Register every command-line option of the traffic-export command on *parser*."""
    parser.description = "Export traffic records from the PCE based on specified filters and settings."

    # Local alias keeps the long option list readable; every option is optional.
    add = parser.add_argument

    # Output options
    add('--format', '-f', required=False, default='excel', choices=['csv', 'excel'],
        help='Output file format')
    add('--output-dir', '-o', required=False, default='output',
        help='Directory where to save the output file')

    # Traffic endpoint filters
    add('--source-filters', '-sf', required=False, type=str, nargs='+', default=None,
        help='Source filters to apply (e.g. label:Web, iplist:Private_Networks)')
    add('--destination-filters', '-df', required=False, type=str, nargs='*', default=None,
        help='Destination filters to apply (e.g. label:DB, iplist:Public_NATed)')

    # Time window and result sizing
    add('--since-timestamp', '-st', required=False, type=str, default=None,
        help='Export traffic records since this timestamp (ISO 8601 format)')
    add('--until-timestamp', '-ut', required=False, type=str, default=None,
        help='Export traffic records until this timestamp (ISO 8601 format)')
    add('--timeframe-hours', '-tfh', required=False, type=int, default=None,
        help='Export traffic records from the last X hours (overrides --since-timestamp and --until-timestamp)')
    add('--records-count-limit', '-rl', required=False, type=int, default=10000,
        help='Maximum number of records to export')

    # Rendering / post-processing toggles
    add('--draft-mode-enabled', '-dme', action='store_true', required=False, default=False,
        help='Enable draft mode to recalculate policy decisions based on draft rules')
    add('--protocol-names', '-pn', action='store_true', required=False, default=False,
        help='Translate common protocol numbers to names (e.g., 6 -> TCP) before export')
    add('--timezone', '-tz', required=False, type=str, default=None,
        help='Convert timestamps to this timezone (e.g., America/New_York, Europe/Paris). If not specified, timestamps remain in UTC.')
    add('--consolidate-labels', '-cl', action='store_true', required=False, default=False,
        help='Consolidate all workload labels into a single column (src_labels, dst_labels) as comma-separated values, ordered by label types')
    add('--label-separator', '-ls', required=False, type=str, default=',',
        help='Separator to use when consolidating labels (default: ","). Only applies when --consolidate-labels is enabled. Examples: ", ", " ", "|", ";"')
    add('--disable-wrap-text', '-dwt', action='store_true', required=False, default=False,
        help='Disable text wrapping for all report columns (enabled by default)')
    add('--omit-columns', '-oc', required=False, type=str, nargs='+', default=None,
        help=_generate_omit_columns_help())
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
def __main(args: Dict, org: pylo.Organization, **kwargs):
    """Run the traffic-export command: query the PCE Explorer API and write a CSV/Excel report.

    Args:
        args: parsed CLI arguments as a dict (argparse dest names, e.g. 'output_dir').
        org: the PCE Organization, pre-loaded with labels/labelgroups/iplists/services
             (see objects_load_filter at module level).
        **kwargs: extra context passed by the CLI framework; unused here.

    Raises:
        pylo.PyloEx: on invalid filters, timestamps, timezone or --omit-columns values.
    """
    settings_output_file_format: Literal['csv', 'excel'] = args['format']
    settings_output_dir: str = args['output_dir']
    settings_source_filters: List[str] | None = args['source_filters']
    settings_destination_filters: List[str] | None = args['destination_filters']
    settings_since_timestamp: str | None = args['since_timestamp']
    settings_until_timestamp: str | None = args['until_timestamp']
    settings_timeframe_hours: int | None = args['timeframe_hours']
    settings_records_count_limit: int = args['records_count_limit']
    settings_draft_mode_enabled: bool = args['draft_mode_enabled']
    settings_protocol_names: bool = args['protocol_names']
    settings_timezone: str | None = args['timezone']
    settings_consolidate_labels: bool = args['consolidate_labels']
    settings_label_separator: str = args['label_separator']
    settings_disable_wrap_text: bool = args['disable_wrap_text']
    settings_omit_columns: List[str] | None = args['omit_columns']

    explorer_query = org.connector.new_explorer_query_v2(max_results=settings_records_count_limit, draft_mode_enabled=settings_draft_mode_enabled)

    def _apply_filters(filter_values: List[str] | None, filter_set: pylo.ExplorerFilterSetV2, descriptor: Literal['source', 'destination']):
        """Translate 'label:<name>' / 'iplist:<name>' CLI strings into Explorer filters on filter_set."""
        valid_filter_prefixes = ['label:', 'iplist:']
        if filter_values is None:
            return

        for filter_value in filter_values:
            # a single filter may be made of multiple comma-separated values which will be processed individually
            value_parts = [part.strip() for part in filter_value.split(',') if part.strip() != '']
            # one filter object per CLI argument; its comma-separated parts are added to it below
            # (renamed from 'filter' to avoid shadowing the builtin)
            if descriptor == 'source':
                traffic_filter = filter_set.new_source_filter()
            else:
                traffic_filter = filter_set.new_destination_filter()

            for filter_item_string in value_parts:
                if filter_item_string.startswith('label:'):
                    label_name = filter_item_string[len('label:'):]
                    # case-insensitive lookup may legitimately match several labels
                    label_search_result: List[pylo.Label] | None = org.LabelStore.find_label_by_name(label_name,
                                                                                                    raise_exception_if_not_found=False,
                                                                                                    case_sensitive=False)
                    if len(label_search_result) == 0:
                        raise pylo.PyloEx(f"Label '{label_name}' not found in PCE!")
                    elif len(label_search_result) > 1:
                        raise pylo.PyloEx(f"Multiple labels found for name '{label_name}', please use a more specific name or enable case sensitivity!")

                    traffic_filter.add_label(label_search_result[0])

                elif filter_item_string.startswith('iplist:'):
                    iplist_name = filter_item_string[len('iplist:'):]
                    iplist_obj = org.IPListStore.find_by_name(iplist_name)
                    if iplist_obj is None:
                        raise pylo.PyloEx(f"IPList '{iplist_name}' not found in PCE!")
                    traffic_filter.add_iplist(iplist_obj)
                else:
                    raise pylo.PyloEx(f"Invalid {descriptor} filter format: '{filter_item_string}', valid prefixes are: {valid_filter_prefixes}")

    # Processing time filters: --timeframe-hours is exclusive with explicit timestamps
    if settings_timeframe_hours is not None:
        if settings_since_timestamp is not None or settings_until_timestamp is not None:
            raise pylo.PyloEx("--timeframe-hours cannot be used together with --since-timestamp or --until-timestamp")
        explorer_query.filters.set_time_from_x_seconds_ago(settings_timeframe_hours * 3600)
    else:
        if settings_since_timestamp is not None:
            try:
                explorer_query.filters.set_time_from(datetime.fromisoformat(settings_since_timestamp))
            except ValueError:
                raise pylo.PyloEx("Invalid --since-timestamp format, please use ISO 8601 format")
        else:
            raise pylo.PyloEx("Either --since-timestamp or --timeframe-hours must be provided")

        # upper bound is optional; when absent the PCE uses its own default window end
        if settings_until_timestamp is not None:
            try:
                explorer_query.filters.set_time_to(datetime.fromisoformat(settings_until_timestamp))
            except ValueError:
                raise pylo.PyloEx("Invalid --until-timestamp format, please use ISO 8601 format")

    # Processing source filters
    _apply_filters(settings_source_filters, explorer_query.filters, 'source')

    # Processing destination filters
    _apply_filters(settings_destination_filters, explorer_query.filters, 'destination')

    print("Executing and downloading traffic export query... ", flush=True, end='')
    query_results = explorer_query.execute()
    print("DONE")

    print("Processing traffic records... ", flush=True, end='')
    records: List[ExplorerResultV2] = query_results.get_all_records()
    print(f"DONE - {len(records)} records retrieved")

    # Get label types from the organization
    label_types = org.LabelStore.label_types

    # Define base columns and dynamically add label columns
    if settings_consolidate_labels:
        # Use consolidated label columns
        src_label_columns = ['src_labels']
        dst_label_columns = ['dst_labels']
    else:
        # Use individual label columns, one per label type
        src_label_columns = [f'src_{label_type}' for label_type in label_types]
        dst_label_columns = [f'dst_{label_type}' for label_type in label_types]

    # Build policy columns, excluding draft_policy_decision if draft mode is not enabled
    policy_columns = POLICY_COLUMNS.copy()
    if not settings_draft_mode_enabled:
        policy_columns = [col for col in policy_columns if col != 'draft_policy_decision']

    # Construct all columns in the correct order:
    # src_ip, src_workload, src_labels, dst_ip, dst_workload, dst_labels, protocol, port, policy_decision, [draft_policy_decision], first_detected, last_detected
    all_columns = (BASE_COLUMNS + src_label_columns + DST_BASE_COLUMNS + dst_label_columns +
                   SERVICE_COLUMNS + policy_columns + TIME_COLUMNS)

    # Process omit-columns setting
    columns_to_include = all_columns.copy()
    if settings_omit_columns is not None:
        # Validate column names. NOTE(review): the comparison lowercases user input and
        # assumes all generated column names are lowercase too -- confirm for PCEs with
        # mixed-case label type keys.
        omit_columns_lower = [col.lower() for col in settings_omit_columns]
        invalid_columns = [col for col in omit_columns_lower if col not in all_columns]
        if invalid_columns:
            raise pylo.PyloEx(f"Invalid column names in --omit-columns: {invalid_columns}. Available columns: {all_columns}")

        # Remove omitted columns
        columns_to_include = [col for col in all_columns if col not in omit_columns_lower]

        # Ensure at least one column remains
        if len(columns_to_include) == 0:
            raise pylo.PyloEx("Cannot omit all columns. At least one column must be included in the export.")

    # Validate timezone if provided (broad Exception: ZoneInfo may raise several error types)
    target_timezone = None
    if settings_timezone is not None:
        try:
            target_timezone = ZoneInfo(settings_timezone)
        except Exception as e:
            raise pylo.PyloEx(f"Invalid timezone '{settings_timezone}': {e}")

    # Build headers based on columns to include (max_width values are Excel column hints)
    header_definitions = {
        'src_ip': ExcelHeader(name='src_ip', max_width=18),
        'src_iplist': ExcelHeader(name='src_iplist', max_width=40),
        'src_workload': ExcelHeader(name='src_workload', max_width=30),
        'dst_ip': ExcelHeader(name='dst_ip', max_width=18),
        'dst_iplist': ExcelHeader(name='dst_iplist', max_width=40),
        'dst_workload': ExcelHeader(name='dst_workload', max_width=30),
        'protocol': ExcelHeader(name='protocol', max_width=18),
        'port': ExcelHeader(name='port', max_width=12),
        'policy_decision': ExcelHeader(name='policy_decision', max_width=20),
        'first_detected': ExcelHeader(name='first_detected', max_width=22),
        'last_detected': ExcelHeader(name='last_detected', max_width=22),
    }

    # Add a draft_policy_decision header only if draft mode is enabled
    if settings_draft_mode_enabled:
        header_definitions['draft_policy_decision'] = ExcelHeader(name='draft_policy_decision', max_width=25)

    # Add dynamic label column headers
    if settings_consolidate_labels:
        header_definitions['src_labels'] = ExcelHeader(name='src_labels', max_width=50)
        header_definitions['dst_labels'] = ExcelHeader(name='dst_labels', max_width=50)
    else:
        for label_type in label_types:
            header_definitions[f'src_{label_type}'] = ExcelHeader(name=f'src_{label_type}', max_width=25)
            header_definitions[f'dst_{label_type}'] = ExcelHeader(name=f'dst_{label_type}', max_width=25)

    csv_report_headers = pylo.ExcelHeaderSet([
        header_definitions[col] for col in columns_to_include
    ])
    csv_report = ArraysToExcel()
    sheet = csv_report.create_sheet(
        'traffic',
        csv_report_headers,
        force_all_wrap_text=not settings_disable_wrap_text,
        multivalues_cell_delimiter=','
    )

    # Common IANA protocol numbers -> names. Hoisted out of _protocol_display so the
    # mapping is built once instead of once per traffic record.
    common_protocols = {
        1: 'ICMP',
        6: 'TCP',
        17: 'UDP',
        50: 'ESP',
        51: 'AH',
        132: 'SCTP'
    }

    def _protocol_display(proto: str | int | None) -> str | int | None:
        """Return a human-readable protocol name when known; otherwise the original value."""
        if proto is None:
            return None
        # Accept ints or numeric strings; fallback to original value on conversion issues.
        try:
            proto_int = int(proto)
        except (ValueError, TypeError):
            return proto
        return common_protocols.get(proto_int, proto)

    def _convert_timestamp(timestamp_str: str | None, target_tz: ZoneInfo | None) -> str | None:
        """Convert UTC ISO 8601 timestamp to target timezone if specified, otherwise return as-is."""
        if timestamp_str is None or target_tz is None:
            return timestamp_str

        try:
            # Parse the UTC timestamp ('Z' suffix is not accepted by fromisoformat before 3.11)
            dt = datetime.fromisoformat(timestamp_str.replace('Z', '+00:00'))
            # Convert to the target timezone
            dt_converted = dt.astimezone(target_tz)
            # Return as ISO 8601 string
            return dt_converted.isoformat()
        except Exception:
            # If conversion fails, return original (best-effort: never break the export over one timestamp)
            return timestamp_str

    def _format_iplists(iplists: Dict[str, pylo.IPList]) -> str | None:
        """Return a comma-joined, case-insensitively sorted, de-duplicated list of IPList names (href as fallback)."""
        if not iplists:
            return None
        names: List[str] = []
        for iplist in iplists.values():
            if iplist.name:
                names.append(iplist.name)
            else:
                names.append(iplist.href)
        if not names:
            return None
        return ','.join(sorted(set(names), key=str.lower))

    for record in records:
        # Build a full record with all columns; omitted ones are filtered out afterwards
        full_record_to_export = {
            'src_ip': record.source_ip,
            'src_iplist': _format_iplists(record.get_source_iplists(org)),
            'src_workload': record.source_workload_hostname,
            'dst_ip': record.destination_ip,
            'dst_iplist': _format_iplists(record.get_destination_iplists(org)),
            'dst_workload': record.destination_workload_hostname,
            'protocol': _protocol_display(record.service_protocol) if settings_protocol_names else record.service_protocol,
            'port': record.service_port,
            'policy_decision': record.policy_decision_string,
            'draft_policy_decision': record.draft_mode_policy_decision_to_str() if settings_draft_mode_enabled else None,
            'first_detected': _convert_timestamp(record.first_detected, target_timezone),
            'last_detected': _convert_timestamp(record.last_detected, target_timezone),
        }

        # Add source workload labels
        if settings_consolidate_labels:
            # Consolidate all labels into a single separator-joined string, ordered by label types
            if record.source_workload_href:
                src_label_values = [record.source_workload_labels_by_type.get(label_type) for label_type in label_types]
                src_label_values = [lv for lv in src_label_values if lv is not None]
                full_record_to_export['src_labels'] = settings_label_separator.join(src_label_values) if src_label_values else None
            else:
                full_record_to_export['src_labels'] = None
        else:
            for label_type in label_types:
                full_record_to_export[f'src_{label_type}'] = record.source_workload_labels_by_type.get(label_type) if record.source_workload_href else None

        # Add destination workload labels
        if settings_consolidate_labels:
            # Consolidate all labels into a single separator-joined string, ordered by label types
            if record.destination_workload_href:
                dst_label_values = [record.destination_workload_labels_by_type.get(label_type) for label_type in label_types]
                dst_label_values = [lv for lv in dst_label_values if lv is not None]
                full_record_to_export['dst_labels'] = settings_label_separator.join(dst_label_values) if dst_label_values else None
            else:
                full_record_to_export['dst_labels'] = None
        else:
            for label_type in label_types:
                full_record_to_export[f'dst_{label_type}'] = record.destination_workload_labels_by_type.get(label_type) if record.destination_workload_href else None

        # Filter to include only selected columns
        csv_record = {col: full_record_to_export[col] for col in columns_to_include}
        sheet.add_line_from_object(csv_record)

    if sheet.lines_count() < 1:
        print("No traffic records matched the filters; nothing to export.")
        return

    os.makedirs(settings_output_dir, exist_ok=True)
    output_filename_base = make_filename_with_timestamp('traffic-export_', settings_output_dir)

    if settings_output_file_format == 'csv':
        output_filename = output_filename_base + '.csv'
        print(f"Writing CSV report to '{output_filename}' ... ", end='', flush=True)
        sheet.write_to_csv(output_filename)
    else:
        output_filename = output_filename_base + '.xlsx'
        print(f"Writing Excel report to '{output_filename}' ... ", end='', flush=True)
        csv_report.write_to_excel(output_filename)
    print("DONE")
|
356
|
+
|
|
357
|
+
|
|
358
|
+
# Register the command with the CLI framework: name, entry point, argparse filler,
# and the restricted set of PCE object types to load before execution.
command_object = Command(command_name, __main, fill_parser, load_specific_objects_only=objects_load_filter)
|