illumio-pylo 0.3.12 → 0.3.13 (py3-none-any.whl)
This diff compares the contents of two package versions as publicly released to a supported registry. It is provided for informational purposes only.
- illumio_pylo/API/APIConnector.py +61 -14
- illumio_pylo/API/CredentialsManager.py +130 -3
- illumio_pylo/API/Explorer.py +619 -14
- illumio_pylo/API/JsonPayloadTypes.py +64 -4
- illumio_pylo/FilterQuery.py +892 -0
- illumio_pylo/LabelCommon.py +13 -3
- illumio_pylo/LabelDimension.py +109 -0
- illumio_pylo/LabelStore.py +97 -38
- illumio_pylo/WorkloadStore.py +58 -0
- illumio_pylo/__init__.py +9 -3
- illumio_pylo/cli/__init__.py +5 -2
- illumio_pylo/cli/commands/__init__.py +1 -0
- illumio_pylo/cli/commands/credential_manager.py +176 -0
- illumio_pylo/cli/commands/traffic_export.py +358 -0
- illumio_pylo/cli/commands/ui/credential_manager_ui/app.js +191 -2
- illumio_pylo/cli/commands/ui/credential_manager_ui/index.html +50 -1
- illumio_pylo/cli/commands/ui/credential_manager_ui/styles.css +179 -28
- illumio_pylo/cli/commands/update_pce_objects_cache.py +1 -2
- illumio_pylo/cli/commands/workload_export.py +29 -0
- {illumio_pylo-0.3.12.dist-info → illumio_pylo-0.3.13.dist-info}/METADATA +1 -1
- {illumio_pylo-0.3.12.dist-info → illumio_pylo-0.3.13.dist-info}/RECORD +24 -22
- illumio_pylo/Query.py +0 -331
- {illumio_pylo-0.3.12.dist-info → illumio_pylo-0.3.13.dist-info}/WHEEL +0 -0
- {illumio_pylo-0.3.12.dist-info → illumio_pylo-0.3.13.dist-info}/licenses/LICENSE +0 -0
- {illumio_pylo-0.3.12.dist-info → illumio_pylo-0.3.13.dist-info}/top_level.txt +0 -0
illumio_pylo/cli/commands/credential_manager.py:

```diff
@@ -64,6 +64,11 @@ def fill_parser(parser: argparse.ArgumentParser):
     delete_parser.add_argument('--yes', '-y', action='store_true', default=False,
                                help='Skip confirmation prompt')
 
+    # Encrypt sub-command
+    encrypt_parser = sub_parser.add_parser('encrypt', help='Encrypt an existing credential API key')
+    encrypt_parser.add_argument('--name', required=False, type=str, default=None,
+                                help='Name of the credential to encrypt')
+
     # Web editor sub-command
     web_editor_parser = sub_parser.add_parser('web-editor', help='Start web-based credential editor')
     web_editor_parser.add_argument('--host', required=False, type=str, default='127.0.0.1',
```
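The hunk above registers the new `encrypt` sub-command next to the existing ones. As a minimal standalone sketch of that argparse wiring (the `prog` name and the sample invocation are illustrative, not the package's actual entry point):

```python
import argparse

# Hypothetical prog name; the real CLI registers many more sub-commands.
parser = argparse.ArgumentParser(prog='credential-manager')
sub_parser = parser.add_subparsers(dest='sub_command', required=True)

encrypt_parser = sub_parser.add_parser('encrypt', help='Encrypt an existing credential API key')
encrypt_parser.add_argument('--name', required=False, type=str, default=None,
                            help='Name of the credential to encrypt')

# Parsed namespaces become plain dicts, which matches the args['sub_command']
# dispatch used in __main() below.
args = vars(parser.parse_args(['encrypt', '--name', 'prod-pce']))
print(args)  # {'sub_command': 'encrypt', 'name': 'prod-pce'} (key order may vary)
```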
```diff
@@ -317,6 +322,79 @@ def __main(args, **kwargs):
         connector.objects_label_dimension_get()
         print("OK!")
 
+    elif args['sub_command'] == 'encrypt':
+        # Check if encryption is available first
+        if not is_encryption_available():
+            print("Encryption is not available. Please ensure an SSH agent is running with RSA or Ed25519 keys added.")
+            sys.exit(1)
+
+        # if name is not provided, prompt for it
+        wanted_name = args['name']
+        if wanted_name is None:
+            wanted_name = click.prompt('> Input a Profile Name to encrypt (ie: prod-pce)', type=str)
+
+        # find the credential by name
+        found_profile = get_credentials_from_file(wanted_name, fail_with_an_exception=False)
+        if found_profile is None:
+            print("Cannot find a profile named '{}'".format(wanted_name))
+            print("Available profiles:")
+            credentials = get_all_credentials()
+            for credential in credentials:
+                print(" - {}".format(credential.name))
+            sys.exit(1)
+
+        print("Found profile '{}' in file '{}'".format(found_profile.name, found_profile.originating_file))
+        print(" - FQDN: {}".format(found_profile.fqdn))
+        print(" - Port: {}".format(found_profile.port))
+        print(" - Org ID: {}".format(found_profile.org_id))
+        print(" - API User: {}".format(found_profile.api_user))
+
+        # Check if already encrypted
+        if found_profile.is_api_key_encrypted():
+            print("ERROR: The API key for profile '{}' is already encrypted.".format(found_profile.name))
+            sys.exit(1)
+
+        print()
+        print("Available SSH keys (ECDSA NISTPXXX keys and a few others are not supported and will be filtered out):")
+        ssh_keys = get_supported_keys_from_ssh_agent()
+
+        if len(ssh_keys) == 0:
+            print("No supported SSH keys found in the agent.")
+            sys.exit(1)
+
+        # display a table of keys
+        print_keys(keys=ssh_keys, display_index=True)
+        print()
+
+        index_of_selected_key = click.prompt('> Select key by ID#', type=click.IntRange(0, len(ssh_keys)-1))
+        selected_ssh_key = ssh_keys[index_of_selected_key]
+        print("Selected key: {} | {} | {}".format(selected_ssh_key.get_name(),
+                                                  selected_ssh_key.get_fingerprint().hex(),
+                                                  selected_ssh_key.comment))
+        print(" * encrypting API key with selected key (you may be prompted by your SSH agent for confirmation or PIN code) ...", flush=True, end="")
+        encrypted_api_key = encrypt_api_key_with_paramiko_ssh_key_chacha20poly1305(ssh_key=selected_ssh_key, api_key=found_profile.api_key)
+        print("OK!")
+        print(" * trying to decrypt the encrypted API key...", flush=True, end="")
+        decrypted_api_key = decrypt_api_key_with_paramiko_ssh_key_chacha20poly1305(encrypted_api_key_payload=encrypted_api_key)
+        if decrypted_api_key != found_profile.api_key:
+            raise pylo.PyloEx("Decrypted API key does not match original API key")
+        print("OK!")
+
+        credentials_data: CredentialFileEntry = {
+            "name": found_profile.name,
+            "fqdn": found_profile.fqdn,
+            "port": found_profile.port,
+            "org_id": found_profile.org_id,
+            "api_user": found_profile.api_user,
+            "verify_ssl": found_profile.verify_ssl,
+            "api_key": encrypted_api_key
+        }
+
+        print("* Updating credential in file '{}'...".format(found_profile.originating_file), flush=True, end="")
+        create_credential_in_file(file_full_path=found_profile.originating_file, data=credentials_data, overwrite_existing_profile=True)
+        print("OK!")
+        print("API key for profile '{}' has been encrypted successfully.".format(found_profile.name))
+
     elif args['sub_command'] == 'web-editor':
         run_web_editor(host=args['host'], port=args['port'])
 
```
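Note that the command encrypts the API key and then immediately decrypts it, refusing to save unless the round trip matches. The sketch below shows that verify-after-encrypt pattern at the cipher level using ChaCha20-Poly1305 from the `cryptography` package; it is a generic illustration, not the package's scheme, which additionally derives the symmetric key from an SSH-agent key via `encrypt_api_key_with_paramiko_ssh_key_chacha20poly1305()`:

```python
import os
from cryptography.hazmat.primitives.ciphers.aead import ChaCha20Poly1305

key = ChaCha20Poly1305.generate_key()  # 32-byte key; stand-in for the SSH-agent-derived key
nonce = os.urandom(12)                 # ChaCha20-Poly1305 uses a 96-bit nonce
cipher = ChaCha20Poly1305(key)

api_key = b"my-secret-api-key"
ciphertext = cipher.encrypt(nonce, api_key, None)

# Verify immediately, as the CLI does, so a bad key or agent state is caught
# before the plaintext credential is overwritten on disk.
assert cipher.decrypt(nonce, ciphertext, None) == api_key
```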
```diff
@@ -359,6 +437,7 @@ def print_keys(keys: list[paramiko.AgentKey], display_index=True) -> None:
 def run_web_editor(host: str = '127.0.0.1', port: int = 5000) -> None:
     """Start the Flask web server for credential management."""
     try:
+        # noinspection PyUnusedImports
        from flask import Flask, jsonify, request, send_from_directory
     except ImportError:
         print("Flask is not installed. Please install it with: pip install flask")
```
```diff
@@ -397,6 +476,7 @@ def run_web_editor(host: str = '127.0.0.1', port: int = 5000) -> None:
                 'org_id': cred.org_id,
                 'api_user': cred.api_user,
                 'verify_ssl': cred.verify_ssl,
+                'api_key_encrypted': cred.is_api_key_encrypted(),
                 'originating_file': cred.originating_file
             })
         return jsonify(result)
```
```diff
@@ -414,6 +494,7 @@ def run_web_editor(host: str = '127.0.0.1', port: int = 5000) -> None:
             'org_id': found_profile.org_id,
             'api_user': found_profile.api_user,
             'verify_ssl': found_profile.verify_ssl,
+            'api_key_encrypted': found_profile.is_api_key_encrypted(),
             'originating_file': found_profile.originating_file
         })
 
```
```diff
@@ -585,6 +666,101 @@ def run_web_editor(host: str = '127.0.0.1', port: int = 5000) -> None:
     def api_encryption_status():
         return jsonify({'available': is_encryption_available()})
 
+    # API: Encrypt a credential's API key
+    @app.route('/api/credentials/<name>/encrypt', methods=['POST'])
+    def api_encrypt_credential(name):
+        data = request.get_json()
+        if not data:
+            return jsonify({'error': 'No data provided'}), 400
+
+        if not is_encryption_available():
+            return jsonify({'error': 'Encryption is not available. Please ensure an SSH agent is running with RSA or Ed25519 keys added.'}), 400
+
+        found_profile = get_credentials_from_file(name, fail_with_an_exception=False)
+        if found_profile is None:
+            return jsonify({'error': 'Credential not found'}), 404
+
+        # Check if already encrypted
+        if found_profile.is_api_key_encrypted():
+            return jsonify({'error': 'API key is already encrypted'}), 400
+
+        # Get the SSH key index
+        if 'ssh_key_index' not in data:
+            return jsonify({'error': 'ssh_key_index is required'}), 400
+
+        try:
+            ssh_keys = get_supported_keys_from_ssh_agent()
+            key_index = int(data['ssh_key_index'])
+            if key_index < 0 or key_index >= len(ssh_keys):
+                return jsonify({'error': 'Invalid SSH key index'}), 400
+
+            selected_ssh_key = ssh_keys[key_index]
+            encrypted_api_key = encrypt_api_key_with_paramiko_ssh_key_chacha20poly1305(
+                ssh_key=selected_ssh_key, api_key=found_profile.api_key)
+
+            # Verify encryption
+            decrypted_api_key = decrypt_api_key_with_paramiko_ssh_key_chacha20poly1305(
+                encrypted_api_key_payload=encrypted_api_key)
+            if decrypted_api_key != found_profile.api_key:
+                return jsonify({'error': 'Encryption verification failed'}), 500
+
+            # Update the credential
+            credentials_data: CredentialFileEntry = {
+                "name": found_profile.name,
+                "fqdn": found_profile.fqdn,
+                "port": found_profile.port,
+                "org_id": found_profile.org_id,
+                "api_user": found_profile.api_user,
+                "verify_ssl": found_profile.verify_ssl,
+                "api_key": encrypted_api_key
+            }
+
+            create_credential_in_file(file_full_path=found_profile.originating_file,
+                                      data=credentials_data, overwrite_existing_profile=True)
+            return jsonify({'success': True, 'message': f"API key for '{name}' encrypted successfully"})
+        except Exception as e:
+            return jsonify({'error': f'Encryption failed: {str(e)}'}), 500
+
+    # Flag to track shutdown request
+    shutdown_requested = {'value': False}
+
+    # API: Request server shutdown
+    @app.route('/api/shutdown', methods=['POST'])
+    def api_shutdown():
+        shutdown_requested['value'] = True
+        return jsonify({'success': True, 'message': 'Shutdown acknowledged. Server will stop shortly.'})
+
+    # Shutdown check and security headers after each request
+    @app.after_request
+    def check_shutdown(response):
+        # Add security headers to prevent XSS and other attacks
+        response.headers['Content-Security-Policy'] = (
+            "default-src 'self'; "
+            "script-src 'self' 'unsafe-inline'; "
+            "style-src 'self' 'unsafe-inline'; "
+            "img-src 'self' data:; "
+            "font-src 'self'; "
+            "connect-src 'self'; "
+            "frame-ancestors 'none'; "
+            "base-uri 'self'; "
+            "form-action 'self'"
+        )
+        response.headers['X-Content-Type-Options'] = 'nosniff'
+        response.headers['X-Frame-Options'] = 'DENY'
+        response.headers['X-XSS-Protection'] = '1; mode=block'
+        response.headers['Referrer-Policy'] = 'strict-origin-when-cross-origin'
+
+        if shutdown_requested['value']:
+            # Schedule shutdown after response is sent
+            def shutdown():
+                import time
+                time.sleep(1)  # Give time for the response to be sent
+                print("\nShutdown requested via web UI. Stopping server...")
+                os._exit(0)
+            import threading
+            threading.Thread(target=shutdown, daemon=True).start()
+        return response
+
     print(f"Starting web editor at http://{host}:{port}")
     print("Press Ctrl+C to stop the server")
     app.run(host=host, port=port, debug=False)
```
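Two patterns in this hunk are worth calling out: security headers are attached centrally in an `@app.after_request` hook, and shutdown defers `os._exit()` to a daemon thread so the HTTP response is flushed before the development server dies. A minimal standalone Flask sketch of both (route and header set reduced; names assumed):

```python
import os
import threading
import time

from flask import Flask, jsonify

app = Flask(__name__)
shutdown_requested = {'value': False}  # mutable cell so the closure can flip it

@app.route('/api/shutdown', methods=['POST'])
def api_shutdown():
    shutdown_requested['value'] = True
    return jsonify({'success': True})

@app.after_request
def add_headers_and_maybe_exit(response):
    # Headers are added here so every response gets them, static files included.
    response.headers['X-Frame-Options'] = 'DENY'
    if shutdown_requested['value']:
        def _exit_soon():
            time.sleep(1)  # let the current response reach the client first
            os._exit(0)
        threading.Thread(target=_exit_soon, daemon=True).start()
    return response

if __name__ == '__main__':
    app.run(host='127.0.0.1', port=5000)
```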
illumio_pylo/cli/commands/traffic_export.py (new file):

```diff
@@ -0,0 +1,358 @@
+import argparse
+from datetime import datetime
+import os
+from typing import Dict, List, Literal
+from zoneinfo import ZoneInfo
+
+import illumio_pylo as pylo
+from illumio_pylo import ArraysToExcel, ExcelHeader, ExplorerResultV2
+from .utils.misc import make_filename_with_timestamp
+from . import Command
+
+command_name = 'traffic-export'
+objects_load_filter: List[pylo.ObjectTypes] = ['labels', 'labelgroups', 'iplists', 'services']
+
+# Base column definitions for traffic export
+BASE_COLUMNS = ['src_ip', 'src_iplist', 'src_workload']
+DST_BASE_COLUMNS = ['dst_ip', 'dst_iplist', 'dst_workload']
+SERVICE_COLUMNS = ['protocol', 'port']
+POLICY_COLUMNS = ['policy_decision', 'draft_policy_decision']
+TIME_COLUMNS = ['first_detected', 'last_detected']
+
+
+def _generate_omit_columns_help() -> str:
+    """Generate help text for --omit-columns with all available base columns."""
+    base_cols = ', '.join(BASE_COLUMNS + DST_BASE_COLUMNS + SERVICE_COLUMNS + POLICY_COLUMNS + TIME_COLUMNS)
+    return (f'Column names to omit from the export (e.g., protocol, port). '
+            f'Base columns: {base_cols}. '
+            f'Label columns are added dynamically based on label types (e.g., src_app, dst_env, etc.). '
+            f'When --consolidate-labels is used, src_labels and dst_labels are available instead of individual label columns.')
+
+
+def fill_parser(parser: argparse.ArgumentParser):
+    parser.description = "Export traffic records from the PCE based on specified filters and settings."
+
+    parser.add_argument('--format', '-f', required=False, default='excel', choices=['csv', 'excel'],
+                        help='Output file format')
+    parser.add_argument('--output-dir', '-o', required=False, default='output',
+                        help='Directory where to save the output file')
+
+    parser.add_argument('--source-filters', '-sf', required=False, type=str, nargs='+', default=None,
+                        help='Source filters to apply (e.g. label:Web, iplist:Private_Networks)')
+    parser.add_argument('--destination-filters', '-df', required=False, type=str, nargs='*', default=None,
+                        help='Destination filters to apply (e.g. label:DB, iplist:Public_NATed)')
+
+    parser.add_argument('--since-timestamp', '-st', required=False, type=str, default=None,
+                        help='Export traffic records since this timestamp (ISO 8601 format)')
+    parser.add_argument('--until-timestamp', '-ut', required=False, type=str, default=None,
+                        help='Export traffic records until this timestamp (ISO 8601 format)')
+    parser.add_argument('--timeframe-hours', '-tfh', required=False, type=int, default=None,
+                        help='Export traffic records from the last X hours (overrides --since-timestamp and --until-timestamp)')
+    parser.add_argument('--records-count-limit', '-rl', required=False, type=int, default=10000,
+                        help='Maximum number of records to export')
+
+    parser.add_argument('--draft-mode-enabled', '-dme', action='store_true', required=False, default=False,
+                        help='Enable draft mode to recalculate policy decisions based on draft rules')
+    parser.add_argument('--protocol-names', '-pn', action='store_true', required=False, default=False,
+                        help='Translate common protocol numbers to names (e.g., 6 -> TCP) before export')
+    parser.add_argument('--timezone', '-tz', required=False, type=str, default=None,
+                        help='Convert timestamps to this timezone (e.g., America/New_York, Europe/Paris). If not specified, timestamps remain in UTC.')
+    parser.add_argument('--consolidate-labels', '-cl', action='store_true', required=False, default=False,
+                        help='Consolidate all workload labels into a single column (src_labels, dst_labels) as comma-separated values, ordered by label types')
+    parser.add_argument('--label-separator', '-ls', required=False, type=str, default=',',
+                        help='Separator to use when consolidating labels (default: ","). Only applies when --consolidate-labels is enabled. Examples: ", ", " ", "|", ";"')
+    parser.add_argument('--disable-wrap-text', '-dwt', action='store_true', required=False, default=False,
+                        help='Disable text wrapping for all report columns (enabled by default)')
+    parser.add_argument('--omit-columns', '-oc', required=False, type=str, nargs='+', default=None,
+                        help=_generate_omit_columns_help())
+
+
+def __main(args: Dict, org: pylo.Organization, **kwargs):
+    settings_output_file_format: Literal['csv', 'excel'] = args['format']
+    settings_output_dir: str = args['output_dir']
+    settings_source_filters: List[str] | None = args['source_filters']
+    settings_destination_filters: List[str] | None = args['destination_filters']
+    settings_since_timestamp: str | None = args['since_timestamp']
+    settings_until_timestamp: str | None = args['until_timestamp']
+    settings_timeframe_hours: int | None = args['timeframe_hours']
+    settings_records_count_limit: int = args['records_count_limit']
+    settings_draft_mode_enabled: bool = args['draft_mode_enabled']
+    settings_protocol_names: bool = args['protocol_names']
+    settings_timezone: str | None = args['timezone']
+    settings_consolidate_labels: bool = args['consolidate_labels']
+    settings_label_separator: str = args['label_separator']
+    settings_disable_wrap_text: bool = args['disable_wrap_text']
+    settings_omit_columns: List[str] | None = args['omit_columns']
+
+    explorer_query = org.connector.new_explorer_query_v2(max_results=settings_records_count_limit, draft_mode_enabled=settings_draft_mode_enabled)
+
+    def _apply_filters(filter_values: List[str] | None, filter_set: pylo.ExplorerFilterSetV2, descriptor: Literal['source', 'destination']):
+        valid_filter_prefixes = ['label:', 'iplist:']
+        if filter_values is None:
+            return
+
+        for filter_value in filter_values:
+            # a single filter may be made of multiple comma-separated values which will be processed individually
+            value_parts = [part.strip() for part in filter_value.split(',') if part.strip() != '']
+            if descriptor == 'source':
+                filter = filter_set.new_source_filter()
+            else:
+                filter = filter_set.new_destination_filter()
+
+            for filter_item_string in value_parts:
+                if filter_item_string.startswith('label:'):
+                    label_name = filter_item_string[len('label:'):]
+                    label_search_result: List[pylo.Label] | None = org.LabelStore.find_label_by_name(label_name,
+                                                                                                     raise_exception_if_not_found=False,
+                                                                                                     case_sensitive=False)
+                    if len(label_search_result) == 0:
+                        raise pylo.PyloEx(f"Label '{label_name}' not found in PCE!")
+                    elif len(label_search_result) > 1:
+                        raise pylo.PyloEx(f"Multiple labels found for name '{label_name}', please use a more specific name or enable case sensitivity!")
+
+                    filter.add_label(label_search_result[0])
+
+                elif filter_item_string.startswith('iplist:'):
+                    iplist_name = filter_item_string[len('iplist:'):]
+                    iplist_obj = org.IPListStore.find_by_name(iplist_name)
+                    if iplist_obj is None:
+                        raise pylo.PyloEx(f"IPList '{iplist_name}' not found in PCE!")
+                    filter.add_iplist(iplist_obj)
+                else:
+                    raise pylo.PyloEx(f"Invalid {descriptor} filter format: '{filter_item_string}', valid prefixes are: {valid_filter_prefixes}")
+
+    # Processing time filters
+    if settings_timeframe_hours is not None:
+        if settings_since_timestamp is not None or settings_until_timestamp is not None:
+            raise pylo.PyloEx("--timeframe-hours cannot be used together with --since-timestamp or --until-timestamp")
+        explorer_query.filters.set_time_from_x_seconds_ago(settings_timeframe_hours * 3600)
+    else:
+        if settings_since_timestamp is not None:
+            try:
+                explorer_query.filters.set_time_from(datetime.fromisoformat(settings_since_timestamp))
+            except ValueError:
+                raise pylo.PyloEx("Invalid --since-timestamp format, please use ISO 8601 format")
+        else:
+            raise pylo.PyloEx("Either --since-timestamp or --timeframe-hours must be provided")
+
+        if settings_until_timestamp is not None:
+            try:
+                explorer_query.filters.set_time_to(datetime.fromisoformat(settings_until_timestamp))
+            except ValueError:
+                raise pylo.PyloEx("Invalid --until-timestamp format, please use ISO 8601 format")
+
+    # Processing source filters
+    _apply_filters(settings_source_filters, explorer_query.filters, 'source')
+
+    # Processing destination filters
+    _apply_filters(settings_destination_filters, explorer_query.filters, 'destination')
+
+    print("Executing and downloading traffic export query... ", flush=True, end='')
+    query_results = explorer_query.execute()
+    print("DONE")
+
+    print("Processing traffic records... ", flush=True, end='')
+    records: List[ExplorerResultV2] = query_results.get_all_records()
+    print(f"DONE - {len(records)} records retrieved")
+
+    # Get label types from the organization
+    label_types = org.LabelStore.label_types
+
+    # Define base columns and dynamically add label columns
+    if settings_consolidate_labels:
+        # Use consolidated label columns
+        src_label_columns = ['src_labels']
+        dst_label_columns = ['dst_labels']
+    else:
+        # Use individual label columns
+        src_label_columns = [f'src_{label_type}' for label_type in label_types]
+        dst_label_columns = [f'dst_{label_type}' for label_type in label_types]
+
+    # Build policy columns, excluding draft_policy_decision if draft mode is not enabled
+    policy_columns = POLICY_COLUMNS.copy()
+    if not settings_draft_mode_enabled:
+        policy_columns = [col for col in policy_columns if col != 'draft_policy_decision']
+
+    # Construct all columns in the correct order:
+    # src_ip, src_workload, src_labels, dst_ip, dst_workload, dst_labels, protocol, port, policy_decision, [draft_policy_decision], first_detected, last_detected
+    all_columns = (BASE_COLUMNS + src_label_columns + DST_BASE_COLUMNS + dst_label_columns +
+                   SERVICE_COLUMNS + policy_columns + TIME_COLUMNS)
+
+    # Process omit-columns setting
+    columns_to_include = all_columns.copy()
+    if settings_omit_columns is not None:
+        # Validate column names
+        omit_columns_lower = [col.lower() for col in settings_omit_columns]
+        invalid_columns = [col for col in omit_columns_lower if col not in all_columns]
+        if invalid_columns:
+            raise pylo.PyloEx(f"Invalid column names in --omit-columns: {invalid_columns}. Available columns: {all_columns}")
+
+        # Remove omitted columns
+        columns_to_include = [col for col in all_columns if col not in omit_columns_lower]
+
+        # Ensure at least one column remains
+        if len(columns_to_include) == 0:
+            raise pylo.PyloEx("Cannot omit all columns. At least one column must be included in the export.")
+
+    # Validate timezone if provided
+    target_timezone = None
+    if settings_timezone is not None:
+        try:
+            target_timezone = ZoneInfo(settings_timezone)
+        except Exception as e:
+            raise pylo.PyloEx(f"Invalid timezone '{settings_timezone}': {e}")
+
+    # Build headers based on columns to include
+    header_definitions = {
+        'src_ip': ExcelHeader(name='src_ip', max_width=18),
+        'src_iplist': ExcelHeader(name='src_iplist', max_width=40),
+        'src_workload': ExcelHeader(name='src_workload', max_width=30),
+        'dst_ip': ExcelHeader(name='dst_ip', max_width=18),
+        'dst_iplist': ExcelHeader(name='dst_iplist', max_width=40),
+        'dst_workload': ExcelHeader(name='dst_workload', max_width=30),
+        'protocol': ExcelHeader(name='protocol', max_width=18),
+        'port': ExcelHeader(name='port', max_width=12),
+        'policy_decision': ExcelHeader(name='policy_decision', max_width=20),
+        'first_detected': ExcelHeader(name='first_detected', max_width=22),
+        'last_detected': ExcelHeader(name='last_detected', max_width=22),
+    }
+
+    # Add a draft_policy_decision header only if draft mode is enabled
+    if settings_draft_mode_enabled:
+        header_definitions['draft_policy_decision'] = ExcelHeader(name='draft_policy_decision', max_width=25)
+
+    # Add dynamic label column headers
+    if settings_consolidate_labels:
+        header_definitions['src_labels'] = ExcelHeader(name='src_labels', max_width=50)
+        header_definitions['dst_labels'] = ExcelHeader(name='dst_labels', max_width=50)
+    else:
+        for label_type in label_types:
+            header_definitions[f'src_{label_type}'] = ExcelHeader(name=f'src_{label_type}', max_width=25)
+            header_definitions[f'dst_{label_type}'] = ExcelHeader(name=f'dst_{label_type}', max_width=25)
+
+    csv_report_headers = pylo.ExcelHeaderSet([
+        header_definitions[col] for col in columns_to_include
+    ])
+    csv_report = ArraysToExcel()
+    sheet = csv_report.create_sheet(
+        'traffic',
+        csv_report_headers,
+        force_all_wrap_text=not settings_disable_wrap_text,
+        multivalues_cell_delimiter=','
+    )
+
+    def _protocol_display(proto: str | int | None) -> str | int | None:
+        """Return a human-readable protocol name when known; otherwise the original value."""
+        if proto is None:
+            return None
+        # Accept ints or numeric strings; fallback to original value on conversion issues.
+        try:
+            proto_int = int(proto)
+        except (ValueError, TypeError):
+            return proto
+
+        common_protocols = {
+            1: 'ICMP',
+            6: 'TCP',
+            17: 'UDP',
+            50: 'ESP',
+            51: 'AH',
+            132: 'SCTP'
+        }
+        return common_protocols.get(proto_int, proto)
+
+    def _convert_timestamp(timestamp_str: str | None, target_tz: ZoneInfo | None) -> str | None:
+        """Convert UTC ISO 8601 timestamp to target timezone if specified, otherwise return as-is."""
+        if timestamp_str is None or target_tz is None:
+            return timestamp_str
+
+        try:
+            # Parse the UTC timestamp
+            dt = datetime.fromisoformat(timestamp_str.replace('Z', '+00:00'))
+            # Convert to the target timezone
+            dt_converted = dt.astimezone(target_tz)
+            # Return as ISO 8601 string
+            return dt_converted.isoformat()
+        except Exception:
+            # If conversion fails, return original
+            return timestamp_str
+
+    def _format_iplists(iplists: Dict[str, pylo.IPList]) -> str | None:
+        if not iplists:
+            return None
+        names: List[str] = []
+        for iplist in iplists.values():
+            if iplist.name:
+                names.append(iplist.name)
+            else:
+                names.append(iplist.href)
+        if not names:
+            return None
+        return ','.join(sorted(set(names), key=str.lower))
+
+    for record in records:
+        # Build a full record with all columns
+        full_record_to_export = {
+            'src_ip': record.source_ip,
+            'src_iplist': _format_iplists(record.get_source_iplists(org)),
+            'src_workload': record.source_workload_hostname,
+            'dst_ip': record.destination_ip,
+            'dst_iplist': _format_iplists(record.get_destination_iplists(org)),
+            'dst_workload': record.destination_workload_hostname,
+            'protocol': _protocol_display(record.service_protocol) if settings_protocol_names else record.service_protocol,
+            'port': record.service_port,
+            'policy_decision': record.policy_decision_string,
+            'draft_policy_decision': record.draft_mode_policy_decision_to_str() if settings_draft_mode_enabled else None,
+            'first_detected': _convert_timestamp(record.first_detected, target_timezone),
+            'last_detected': _convert_timestamp(record.last_detected, target_timezone),
+        }
+
+        # Add source workload labels
+        if settings_consolidate_labels:
+            # Consolidate all labels into a single comma-separated string, ordered by label types
+            if record.source_workload_href:
+                src_label_values = [record.source_workload_labels_by_type.get(label_type) for label_type in label_types]
+                src_label_values = [lv for lv in src_label_values if lv is not None]
+                full_record_to_export['src_labels'] = settings_label_separator.join(src_label_values) if src_label_values else None
+            else:
+                full_record_to_export['src_labels'] = None
+        else:
+            for label_type in label_types:
+                full_record_to_export[f'src_{label_type}'] = record.source_workload_labels_by_type.get(label_type) if record.source_workload_href else None
+
+        # Add destination workload labels
+        if settings_consolidate_labels:
+            # Consolidate all labels into a single comma-separated string, ordered by label types
+            if record.destination_workload_href:
+                dst_label_values = [record.destination_workload_labels_by_type.get(label_type) for label_type in label_types]
+                dst_label_values = [lv for lv in dst_label_values if lv is not None]
+                full_record_to_export['dst_labels'] = settings_label_separator.join(dst_label_values) if dst_label_values else None
+            else:
+                full_record_to_export['dst_labels'] = None
+        else:
+            for label_type in label_types:
+                full_record_to_export[f'dst_{label_type}'] = record.destination_workload_labels_by_type.get(label_type) if record.destination_workload_href else None
+
+        # Filter to include only selected columns
+        csv_record = {col: full_record_to_export[col] for col in columns_to_include}
+        sheet.add_line_from_object(csv_record)
+
+    if sheet.lines_count() < 1:
+        print("No traffic records matched the filters; nothing to export.")
+        return
+
+    os.makedirs(settings_output_dir, exist_ok=True)
+    output_filename_base = make_filename_with_timestamp('traffic-export_', settings_output_dir)
+
+    if settings_output_file_format == 'csv':
+        output_filename = output_filename_base + '.csv'
+        print(f"Writing CSV report to '{output_filename}' ... ", end='', flush=True)
+        sheet.write_to_csv(output_filename)
+    else:
+        output_filename = output_filename_base + '.xlsx'
+        print(f"Writing Excel report to '{output_filename}' ... ", end='', flush=True)
+        csv_report.write_to_excel(output_filename)
+    print("DONE")
+
+
+command_object = Command(command_name, __main, fill_parser, load_specific_objects_only=objects_load_filter)
```