catocli 2.0.2__py3-none-any.whl → 2.0.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of catocli might be problematic.
- catocli/Utils/clidriver.py +4 -1
- catocli/__init__.py +1 -1
- catocli/parsers/custom/__init__.py +4 -3
- catocli/parsers/custom/customLib.py +239 -1
- catocli/parsers/custom/export_rules/export_rules.py +1 -1
- catocli/parsers/custom/export_sites/export_sites.py +186 -54
- catocli/parsers/custom/import_rules_to_tf/__init__.py +1 -1
- catocli/parsers/custom/import_rules_to_tf/import_rules_to_tf.py +1 -137
- catocli/parsers/custom/import_sites_to_tf/__init__.py +45 -0
- catocli/parsers/custom/import_sites_to_tf/import_sites_to_tf.py +891 -0
- catocli/parsers/mutation_accountManagement/__init__.py +6 -6
- catocli/parsers/mutation_admin/__init__.py +6 -6
- catocli/parsers/mutation_container/__init__.py +2 -2
- catocli/parsers/mutation_hardware/__init__.py +2 -2
- catocli/parsers/mutation_policy/__init__.py +192 -192
- catocli/parsers/mutation_sandbox/__init__.py +4 -4
- catocli/parsers/mutation_site/__init__.py +56 -56
- catocli/parsers/mutation_sites/__init__.py +56 -56
- catocli/parsers/mutation_xdr/__init__.py +6 -6
- catocli/parsers/parserApiClient.py +36 -11
- catocli/parsers/query_accountBySubdomain/__init__.py +2 -2
- catocli/parsers/query_accountManagement/__init__.py +2 -2
- catocli/parsers/query_accountMetrics/__init__.py +2 -2
- catocli/parsers/query_accountRoles/__init__.py +2 -2
- catocli/parsers/query_accountSnapshot/__init__.py +2 -2
- catocli/parsers/query_admin/__init__.py +2 -2
- catocli/parsers/query_admins/__init__.py +2 -2
- catocli/parsers/query_appStats/__init__.py +2 -2
- catocli/parsers/query_appStatsTimeSeries/__init__.py +2 -2
- catocli/parsers/query_auditFeed/__init__.py +2 -2
- catocli/parsers/query_catalogs/__init__.py +2 -2
- catocli/parsers/query_container/__init__.py +2 -2
- catocli/parsers/query_devices/__init__.py +2 -2
- catocli/parsers/query_entityLookup/__init__.py +2 -2
- catocli/parsers/query_events/__init__.py +2 -2
- catocli/parsers/query_eventsFeed/__init__.py +2 -2
- catocli/parsers/query_eventsTimeSeries/__init__.py +2 -2
- catocli/parsers/query_hardware/__init__.py +2 -2
- catocli/parsers/query_hardwareManagement/__init__.py +2 -2
- catocli/parsers/query_licensing/__init__.py +2 -2
- catocli/parsers/query_policy/__init__.py +2 -2
- catocli/parsers/query_sandbox/__init__.py +2 -2
- catocli/parsers/query_site/__init__.py +2 -2
- catocli/parsers/query_siteLocation/__init__.py +2 -2
- catocli/parsers/query_subDomains/__init__.py +2 -2
- catocli/parsers/query_xdr/__init__.py +4 -4
- catocli/parsers/raw/README.md +4 -0
- catocli/parsers/raw/__init__.py +3 -2
- {catocli-2.0.2.dist-info → catocli-2.0.3.dist-info}/METADATA +1 -1
- {catocli-2.0.2.dist-info → catocli-2.0.3.dist-info}/RECORD +55 -53
- schema/catolib.py +14 -9
- {catocli-2.0.2.dist-info → catocli-2.0.3.dist-info}/LICENSE +0 -0
- {catocli-2.0.2.dist-info → catocli-2.0.3.dist-info}/WHEEL +0 -0
- {catocli-2.0.2.dist-info → catocli-2.0.3.dist-info}/entry_points.txt +0 -0
- {catocli-2.0.2.dist-info → catocli-2.0.3.dist-info}/top_level.txt +0 -0
catocli/Utils/clidriver.py
CHANGED

```diff
@@ -256,6 +256,9 @@ def main(args=None):
         else:
             if response!=None:
                 print(json.dumps(response[0], sort_keys=True, indent=4))
+    except KeyboardInterrupt:
+        print("\n\nOperation interrupted by user (Ctrl+C). Exiting gracefully...")
+        exit(130) # Standard exit code for SIGINT
     except Exception as e:
         if isinstance(e, AttributeError):
             print('Missing arguments. Usage: catocli <operation> -h')
@@ -265,4 +268,4 @@ def main(args=None):
         else:
             print('ERROR: ',e)
             traceback.print_exc()
-
+        exit(1)
```
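The new handler follows the POSIX shell convention that a process terminated by Ctrl+C exits with 130 (128 plus SIGINT's signal number, 2). A minimal runnable sketch of the same pattern; `run_cli` is a hypothetical stand-in for catocli's operation dispatch, not the package's actual function:

```python
import json
import signal
import sys

def run_cli(args):
    # Hypothetical stand-in for catocli's operation dispatch.
    return [{"status": "ok"}]

def main(args=None):
    try:
        response = run_cli(args)
        if response is not None:
            print(json.dumps(response[0], sort_keys=True, indent=4))
    except KeyboardInterrupt:
        # 128 + signal.SIGINT == 130, the conventional exit code for Ctrl+C.
        print("\n\nOperation interrupted by user (Ctrl+C). Exiting gracefully...")
        sys.exit(128 + signal.SIGINT)
    except Exception as e:
        print('ERROR: ', e)
        sys.exit(1)

if __name__ == "__main__":
    main()
```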
catocli/__init__.py
CHANGED

```diff
@@ -1,2 +1,2 @@
-__version__ = "2.0.2"
+__version__ = "2.0.3"
 __cato_host__ = "https://api.catonetworks.com/api/v1/graphql2"
```

catocli/parsers/custom/__init__.py
CHANGED

```diff
@@ -1,7 +1,8 @@
 
 import catocli.parsers.custom.customLib as customLib
 from catocli.parsers.custom.export_rules import export_rules_parse
-from catocli.parsers.custom.import_rules_to_tf import …
+from catocli.parsers.custom.import_rules_to_tf import rule_import_parse
+from catocli.parsers.custom.import_sites_to_tf import site_import_parse
 from catocli.parsers.configure import configure_parse
 from catocli.parsers.custom.export_sites import export_sites_parse
@@ -33,14 +34,14 @@ def custom_parse(subparsers):
 
     # Add additional custom parsers here
     export_rules_parse(subparsers)
-… (old line 36 not captured in this view)
+    import_parser = rule_import_parse(subparsers)
+    site_import_parse(subparsers, import_parser)
     configure_parse(subparsers)
 
 def get_help_custom(path):
     matchCmd = "catocli "+path.replace("_"," ")
     import os
     pwd = os.path.dirname(__file__)
-    # doc = path+"/README.md"
     abs_path = os.path.join(pwd, "README.md")
     new_line = "\nEXAMPLES:\n"
     lines = open(abs_path, "r").readlines()
```
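`rule_import_parse` now returns its parser so `site_import_parse` can hang the new sites subcommand off the same shared import command. A sketch of that wiring pattern with argparse; the command names and help strings are illustrative assumptions, not catocli's actual CLI surface:

```python
import argparse

def rule_import_parse(subparsers):
    # Register a shared "import" command and return its subparser collection
    # so sibling modules can attach their own subcommands to it.
    import_parser = subparsers.add_parser('import', help='import resources to Terraform')
    import_sub = import_parser.add_subparsers(dest='import_target')
    import_sub.add_parser('rules', help='import rules')
    return import_sub

def site_import_parse(subparsers, import_parser):
    # Attach "sites" under the "import" command created above.
    import_parser.add_parser('sites', help='import sites')

parser = argparse.ArgumentParser(prog='catocli')
subparsers = parser.add_subparsers(dest='command')
import_parser = rule_import_parse(subparsers)
site_import_parse(subparsers, import_parser)
print(parser.parse_args(['import', 'sites']))  # Namespace(command='import', import_target='sites')
```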
catocli/parsers/custom/customLib.py
CHANGED

```diff
@@ -1,5 +1,8 @@
 import os
 import json
+import subprocess
+import glob
+from pathlib import Path
 from graphql_client.api.call_api import ApiClient, CallApi
 from graphql_client.api_client import ApiException
 import logging
@@ -320,4 +323,239 @@ def getAccountID(args, configuration):
     if not account_id:
         raise ValueError("Account ID is required. Provide it using the -accountID flag or set CATO_ACCOUNT_ID environment variable.")
 
-    return account_id
+    return account_id
+
+def check_terraform_binary():
+    """Check if terraform binary is available"""
+    try:
+        result = subprocess.run(['terraform', '--version'], capture_output=True, text=True)
+        if result.returncode == 0:
+            return True, result.stdout.strip().split('\n')[0]
+        else:
+            return False, "Terraform binary not found or not working"
+    except FileNotFoundError:
+        return False, "Terraform binary not found in PATH"
+    except Exception as e:
+        return False, f"Error checking terraform binary: {e}"
+
+
+def check_terraform_config_files():
+    """Check if Terraform configuration files exist in current directory"""
+    tf_files = glob.glob('*.tf') + glob.glob('*.tf.json')
+    if tf_files:
+        return True, tf_files
+    else:
+        return False, []
+
+
+def check_terraform_init():
+    """Check if Terraform has been initialized"""
+    terraform_dir = Path('.terraform')
+    if terraform_dir.exists() and terraform_dir.is_dir():
+        # Check for providers
+        providers_dir = terraform_dir / 'providers'
+        if providers_dir.exists():
+            return True, "Terraform is initialized"
+        else:
+            return False, "Terraform directory exists but no providers found"
+    else:
+        return False, "Terraform not initialized (.terraform directory not found)"
+
+
+def check_module_exists(module_name):
+    """Check if the specified module exists in Terraform configuration"""
+    try:
+        # Remove 'module.' prefix if present
+        clean_module_name = module_name.replace('module.', '')
+
+        # Method 1: Check .tf files directly for module definitions
+        tf_files = glob.glob('*.tf') + glob.glob('*.tf.json')
+        for tf_file in tf_files:
+            try:
+                with open(tf_file, 'r') as f:
+                    content = f.read()
+                    # Look for module "module_name" blocks
+                    if f'module "{clean_module_name}"' in content or f"module '{clean_module_name}'" in content:
+                        return True, f"Module '{clean_module_name}' found in {tf_file}"
+            except Exception as e:
+                print(f"Warning: Could not read {tf_file}: {e}")
+                continue
+
+        # Method 2: Try terraform show -json as fallback
+        try:
+            result = subprocess.run(
+                ['terraform', 'show', '-json'],
+                capture_output=True,
+                text=True,
+                cwd=Path.cwd()
+            )
+
+            if result.returncode == 0:
+                state_data = json.loads(result.stdout)
+
+                # Check if module exists in configuration
+                if 'configuration' in state_data and state_data['configuration']:
+                    modules = state_data.get('configuration', {}).get('root_module', {}).get('module_calls', {})
+                    if clean_module_name in modules:
+                        return True, f"Module '{clean_module_name}' found in Terraform state"
+
+                # Also check in planned_values for modules
+                if 'planned_values' in state_data and state_data['planned_values']:
+                    modules = state_data.get('planned_values', {}).get('root_module', {}).get('child_modules', [])
+                    for module in modules:
+                        module_addr = module.get('address', '')
+                        if clean_module_name in module_addr:
+                            return True, f"Module '{clean_module_name}' found in planned values"
+        except (subprocess.SubprocessError, json.JSONDecodeError) as e:
+            print(f"Warning: Could not check terraform state: {e}")
+
+        return False, f"Module '{clean_module_name}' not found in Terraform configuration files"
+
+    except Exception as e:
+        return False, f"Error checking module existence: {e}"
+
+
+def validate_terraform_environment(module_name, verbose=False):
+    """Validate the complete Terraform environment"""
+    print("\n Validating Terraform environment...")
+
+    # 1. Check terraform binary
+    print("\n Checking Terraform binary...")
+    has_terraform, terraform_msg = check_terraform_binary()
+    if not has_terraform:
+        raise Exception(f" Terraform not available: {terraform_msg}")
+    if verbose:
+        print(f" {terraform_msg}")
+    else:
+        print(" Terraform binary found")
+
+    # 2. Check for configuration files
+    print("\n Checking Terraform configuration files...")
+    has_config, config_files = check_terraform_config_files()
+    if not has_config:
+        raise Exception(" No Terraform configuration files (.tf or .tf.json) found in current directory")
+    if verbose:
+        print(f" Found {len(config_files)} configuration files: {', '.join(config_files)}")
+    else:
+        print(f" Found {len(config_files)} Terraform configuration files")
+
+    # 3. Check if terraform is initialized
+    print("\n Checking Terraform initialization...")
+    is_initialized, init_msg = check_terraform_init()
+    if not is_initialized:
+        raise Exception(f" {init_msg}. Run 'terraform init' first.")
+    if verbose:
+        print(f" {init_msg}")
+    else:
+        print(" Terraform is initialized")
+
+    # 4. Check if the specified module exists
+    print(f"\n Checking if module '{module_name}' exists...")
+    module_exists, module_msg = check_module_exists(module_name)
+    if not module_exists:
+        raise Exception(f" {module_msg}. Please add the module to your Terraform configuration first.")
+    if verbose:
+        print(f" {module_msg}")
+    else:
+        print(f" Module '{module_name}' found")
+
+    # 5. Check if modules are properly installed by running terraform validate
+    print("\n Checking if modules are properly installed...")
+    try:
+        result = subprocess.run(
+            ['terraform', 'validate'],
+            capture_output=True,
+            text=True,
+            cwd=Path.cwd()
+        )
+
+        if result.returncode != 0:
+            error_output = result.stderr.strip()
+            if "module is not yet installed" in error_output or "Module not installed" in error_output:
+                raise Exception(f" Terraform modules are not installed. Please run 'terraform init' to install all required modules.")
+            else:
+                raise Exception(f" Terraform validation failed:\n\n{error_output}")
+
+        print(" All modules are properly installed")
+
+    except subprocess.SubprocessError as e:
+        raise Exception(f" Failed to validate Terraform configuration: {e}")
+
+    print("\n All Terraform environment checks passed!")
+
+
+
+def check_terraform_config_files():
+    """Check if Terraform configuration files exist in current directory"""
+    tf_files = glob.glob('*.tf') + glob.glob('*.tf.json')
+    if tf_files:
+        return True, tf_files
+    else:
+        return False, []
+
+
+def check_terraform_init():
+    """Check if Terraform has been initialized"""
+    terraform_dir = Path('.terraform')
+    if terraform_dir.exists() and terraform_dir.is_dir():
+        # Check for providers
+        providers_dir = terraform_dir / 'providers'
+        if providers_dir.exists():
+            return True, "Terraform is initialized"
+        else:
+            return False, "Terraform directory exists but no providers found"
+    else:
+        return False, "Terraform not initialized (.terraform directory not found)"
+
+
+def check_module_exists(module_name):
+    """Check if the specified module exists in Terraform configuration"""
+    try:
+        # Remove 'module.' prefix if present
+        clean_module_name = module_name.replace('module.', '')
+
+        # Method 1: Check .tf files directly for module definitions
+        tf_files = glob.glob('*.tf') + glob.glob('*.tf.json')
+        for tf_file in tf_files:
+            try:
+                with open(tf_file, 'r') as f:
+                    content = f.read()
+                    # Look for module "module_name" blocks
+                    if f'module "{clean_module_name}"' in content or f"module '{clean_module_name}'" in content:
+                        return True, f"Module '{clean_module_name}' found in {tf_file}"
+            except Exception as e:
+                print(f"Warning: Could not read {tf_file}: {e}")
+                continue
+
+        # Method 2: Try terraform show -json as fallback
+        try:
+            result = subprocess.run(
+                ['terraform', 'show', '-json'],
+                capture_output=True,
+                text=True,
+                cwd=Path.cwd()
+            )
+
+            if result.returncode == 0:
+                state_data = json.loads(result.stdout)
+
+                # Check if module exists in configuration
+                if 'configuration' in state_data and state_data['configuration']:
+                    modules = state_data.get('configuration', {}).get('root_module', {}).get('module_calls', {})
+                    if clean_module_name in modules:
+                        return True, f"Module '{clean_module_name}' found in Terraform state"
+
+                # Also check in planned_values for modules
+                if 'planned_values' in state_data and state_data['planned_values']:
+                    modules = state_data.get('planned_values', {}).get('root_module', {}).get('child_modules', [])
+                    for module in modules:
+                        module_addr = module.get('address', '')
+                        if clean_module_name in module_addr:
+                            return True, f"Module '{clean_module_name}' found in planned values"
+        except (subprocess.SubprocessError, json.JSONDecodeError) as e:
+            print(f"Warning: Could not check terraform state: {e}")
+
+        return False, f"Module '{clean_module_name}' not found in Terraform configuration files"
+
+    except Exception as e:
+        return False, f"Error checking module existence: {e}"
```
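Note that `check_terraform_config_files`, `check_terraform_init`, and `check_module_exists` are defined twice in the shipped file; the later definitions shadow the earlier, identical ones. A minimal sketch of how a caller might use the new validator, assuming catocli 2.0.3 is installed; the module name is a made-up example:

```python
from catocli.parsers.custom.customLib import validate_terraform_environment

try:
    # Raises on the first failed check: terraform binary, *.tf files present,
    # terraform init done, module declared, and `terraform validate` passing.
    validate_terraform_environment("module.site_fra_01", verbose=True)
except Exception as err:
    print(err)
    raise SystemExit(1)
```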
catocli/parsers/custom/export_rules/export_rules.py
CHANGED

```diff
@@ -187,7 +187,7 @@ def export_wf_rules_to_json(args, configuration):
     processed_sections = []
     for index, section_data in enumerate(sections_with_ids):
         processed_sections.append({
-            "section_index": index,
+            "section_index": index+1,
             "section_name": section_data['section']['name'],
             "section_id": section_data['section']['id']
         })
```
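The one-line change makes `section_index` 1-based. For reference, `enumerate` can produce 1-based indexes directly via its `start` parameter; a runnable sketch with invented sample data:

```python
sections_with_ids = [  # invented sample data, not from the package
    {"section": {"name": "Inbound", "id": "sec-1"}},
    {"section": {"name": "Outbound", "id": "sec-2"}},
]

processed_sections = [
    {
        "section_index": index,  # already 1-based via enumerate(..., start=1)
        "section_name": section_data["section"]["name"],
        "section_id": section_data["section"]["id"],
    }
    for index, section_data in enumerate(sections_with_ids, start=1)
]
print(processed_sections)
```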
catocli/parsers/custom/export_sites/export_sites.py
CHANGED

```diff
@@ -1,5 +1,7 @@
 import os
 import json
+import traceback
+import sys
 from graphql_client.api.call_api import ApiClient, CallApi
 from graphql_client.api_client import ApiException
 from ..customLib import writeDataToFile, makeCall, getAccountID
@@ -8,9 +10,19 @@ def export_socket_site_to_json(args, configuration):
     """
     Export consolidated site and socket data to JSON format
     """
-    processed_data = {'sites': …
+    processed_data = {'sites':[]}
+    warning_stats = {
+        'missing_sites': 0,
+        'missing_interfaces': 0,
+        'missing_data': 0,
+        'missing_interface_details': []
+    }
 
     try:
+        settings = {}
+        with open(os.path.join(os.path.dirname(__file__), '../../../../settings.json'), 'r', encoding='utf-8') as f:
+            settings = json.load(f)
+
         account_id = getAccountID(args, configuration)
         # Get account snapshot with siteIDs if provided
         # Get siteIDs from args if provided (comma-separated string)
```
```diff
@@ -32,68 +44,162 @@ def export_socket_site_to_json(args, configuration):
         ##################################################################
         ## Create processed_data object indexed by siteId with location ##
         ##################################################################
-        for …
+        for snapshot_site in snapshot_sites['data']['accountSnapshot']['sites']:
             cur_site = {
-                'wan_interfaces': …
-                'lan_interfaces': …
+                'wan_interfaces': [],
+                'lan_interfaces': [],
             }
-            site_id = …
-… (old lines 41-61 not captured in this view)
+            site_id = snapshot_site.get('id')
+            connectionType = snapshot_site.get('infoSiteSnapshot', {}).get('connType', "")
+            if connectionType not in settings["ignore_export_by_socket_type"]:
+                cur_site['id'] = site_id
+                cur_site['name'] = snapshot_site.get('infoSiteSnapshot', {}).get('name')
+                cur_site['description'] = snapshot_site.get('infoSiteSnapshot', {}).get('description')
+                cur_site['connectionType'] = connectionType
+                cur_site['type'] = snapshot_site.get('infoSiteSnapshot', {}).get('type')
+                cur_site = populateSiteLocationData(args, snapshot_site, cur_site)
+
+                site_interfaces = snapshot_site.get('infoSiteSnapshot', {}).get('interfaces', [])
+                for wan_ni in site_interfaces:
+                    cur_wan_interface = {}
+                    role = wan_ni.get('wanRoleInterfaceInfo', "")
+                    if role is not None and role[0:3] == "wan":
+                        if connectionType == "SOCKET_X1500":
+                            cur_wan_interface['id'] = site_id+":"+ wan_ni.get('id', "")
+                        else:
+                            cur_wan_interface['id'] = site_id+":INT_"+ wan_ni.get('id', "")
+                        cur_wan_interface['id'] = site_id+":INT_"+ wan_ni.get('id', "")
+                        cur_wan_interface['name'] = wan_ni.get('name', "")
+                        cur_wan_interface['upstreamBandwidth'] = wan_ni.get('upstreamBandwidth', 0)
+                        cur_wan_interface['downstreamBandwidth'] = wan_ni.get('downstreamBandwidth', 0)
+                        cur_wan_interface['destType'] = wan_ni.get('destType', "")
+                        cur_wan_interface['role'] = role
+                        cur_site['wan_interfaces'].append(cur_wan_interface)
+
+            if site_id:
+                processed_data['sites'].append(cur_site)
 
         ##################################################################################
         ## Process entity lookup LAN network interfaces adding to site object by site_id##
         ##################################################################################
+        interface_map = {}
         for lan_ni in entity_network_interfaces:
             cur_lan_interface = {
-                'network_ranges': …
+                'network_ranges': [],
             }
-            site_id = lan_ni.get("helperFields","").get('siteId', "")
-            id = lan_ni.get('entity', "").get('id', "")
-            interfaceName = lan_ni.get('…
+            site_id = str(lan_ni.get("helperFields","").get('siteId', ""))
+            id = str(lan_ni.get('entity', "").get('id', ""))
+            interfaceName = lan_ni.get('helperFields', "").get('interfaceName', "")
             cur_lan_interface['id'] = id
             cur_lan_interface['name'] = interfaceName
+            # Split interfaceName on " \ " and take the last element
             cur_lan_interface['index'] = lan_ni.get("helperFields","").get('interfaceId', "")
-            cur_lan_interface['…
-… (old lines 77-79 not captured in this view)
+            cur_lan_interface['destType'] = lan_ni.get("helperFields","").get('destType', "")
+
+            # Create a composite key for interface mapping that includes site_id
+            interface_key = f"{site_id}_{interfaceName}"
+            interface_map[interface_key] = id
+
+            # Only add interface if the site exists in processed_data
+            site_entry = next((site for site in processed_data['sites'] if site['id'] == site_id), None)
+            if site_entry:
+                site_entry['lan_interfaces'].append(cur_lan_interface)
+            else:
+                if hasattr(args, 'verbose') and args.verbose:
+                    print(f"WARNING: Site {site_id} not found in snapshot data, skipping interface {interfaceName} ({id})")
 
         #############################################################################
         ## Process entity lookup network ranges populating by network interface id ##
         #############################################################################
         for range in entity_network_ranges:
+            if hasattr(args, 'verbose') and args.verbose:
+                print(f"Processing network range: {type(range)} - {range}")
             cur_range = {}
-… (old lines 86-87 not captured in this view)
-            interface_name = lan_ni.get('entity', "").get('interfaceName', "")
-            cur_lan_interface['id'] = id
-            cur_lan_interface['subnet'] = lan_ni.get("helperFields","").get('subnet', "")
-            cur_lan_interface['vlanTag'] = lan_ni.get("helperFields","").get('vlanTag', "")
-            cur_lan_interface['microsegmentation'] = lan_ni.get("helperFields","").get('microsegmentation', "")
+            helper_fields = range.get("helperFields", {})
+            entity_data = range.get('entity', {})
 
-… (old lines 94-95 not captured in this view)
+            if hasattr(args, 'verbose') and args.verbose:
+                print(f"  helperFields type: {type(helper_fields)}, value: {helper_fields}")
+                print(f"  entity type: {type(entity_data)}, value: {entity_data}")
+
+            range_id = entity_data.get('id', "")
+            site_id = str(helper_fields.get('siteId', ""))
+            interface_name = str(helper_fields.get('interfaceName', ""))
+            # Use the composite key to lookup interface_id
+            interface_key = f"{site_id}_{interface_name}"
+            interface_id = str(interface_map.get(interface_key, ""))
+            cur_range['id'] = range_id
+            range_name = entity_data.get('name', "")
+            if range_name and " \\ " in range_name:
+                cur_range['rangeName'] = range_name.split(" \\ ").pop()
+            else:
+                cur_range['rangeName'] = range_name
+            cur_range['name'] = range_name
+            cur_range['subnet'] = helper_fields.get('subnet', "")
+            cur_range['vlanTag'] = helper_fields.get('vlanTag', "")
+            cur_range['microsegmentation'] = helper_fields.get('microsegmentation', "")
+
+            # Safely add to processed_data with existence checks
+            if site_id and interface_id and range_id:
+                site_entry = next((site for site in processed_data['sites'] if site['id'] == site_id), None)
+                if not site_entry:
+                    # print(f"WARNING: Site ID {site_id} not found in processed_data")
+                    warning_stats['missing_sites'] += 1
+                    continue
+
+                # Find the interface in the lan_interfaces array
+                interface_entry = next((iface for iface in site_entry['lan_interfaces'] if iface['id'] == interface_id), None)
+                if not interface_entry:
+                    print(f"WARNING: Interface {interface_id} (name: {interface_name}) not found in site {site_id}. Range {range_id} will be skipped.")
+                    warning_stats['missing_interfaces'] += 1
+                    warning_stats['missing_interface_details'].append({
+                        'interface_id': interface_id,
+                        'interface_name': interface_name,
+                        'site_id': site_id,
+                        'range_id': range_id
+                    })
+                    if hasattr(args, 'verbose') and args.verbose:
+                        available_interfaces = [iface['id'] for iface in site_entry['lan_interfaces']]
+                        print(f"  Available interfaces in site {site_id}: {available_interfaces}")
+                        print(f"  Looked up interface with key: {interface_key}")
+                    continue
+                interface_entry['network_ranges'].append(cur_range)
+                if hasattr(args, 'verbose') and args.verbose:
+                    print(f"  Successfully added range {range_id} to site {site_id}, interface_name {interface_name} with interface_id {interface_id}")
+            else:
+                if not interface_id:
+                    print(f"WARNING: Interface lookup failed for range {range_id}. Site: {site_id}, Interface name: {interface_name}, Lookup key: {interface_key}")
+                    if hasattr(args, 'verbose') and args.verbose:
+                        print(f"  Available interface keys: {list(interface_map.keys())[:10]}...")  # Show first 10 keys
+                else:
+                    print(f"WARNING: Missing required data for range: site_id={site_id}, interface_id={interface_id}, range_id={range_id}")
+                warning_stats['missing_data'] += 1
 
+        # Print warning summary
+        total_warnings = warning_stats['missing_sites'] + warning_stats['missing_interfaces'] + warning_stats['missing_data']
+        if total_warnings > 0:
+            print(f"\n=== WARNING SUMMARY ===")
+            print(f"Total warnings: {total_warnings}")
+            print(f"- Missing sites: {warning_stats['missing_sites']}")
+            print(f"- Missing interfaces: {warning_stats['missing_interfaces']}")
+            print(f"- Missing data: {warning_stats['missing_data']}")
+
+            if warning_stats['missing_interfaces'] > 0:
+                print(f"\nMissing interface details:")
+                unique_interfaces = {}
+                for detail in warning_stats['missing_interface_details']:
+                    key = f"{detail['interface_id']} ({detail['interface_name']})"
+                    if key not in unique_interfaces:
+                        unique_interfaces[key] = []
+                    unique_interfaces[key].append(detail['site_id'])
+
+                for interface, sites in unique_interfaces.items():
+                    print(f"  - Interface {interface} missing in sites: {', '.join(sites)}")
+
+            print(f"\nThese warnings indicate network ranges that reference interfaces that don't exist in the site data.")
+            print(f"This is usually caused by data inconsistencies and can be safely ignored if the export completes successfully.")
+            print(f"=========================\n")
+
         # Write the processed data to file using the general-purpose function
         output_file = writeDataToFile(
             data=processed_data,
```
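The core of this hunk is the new `interface_map`: interface names are only unique within a site, so the lookup key combines `site_id` and `interfaceName`. A self-contained sketch of that composite-key pattern, with invented IDs:

```python
interface_map = {}

def register_interface(site_id, interface_name, interface_id):
    # Same composite key the hunk builds: f"{site_id}_{interfaceName}".
    interface_map[f"{site_id}_{interface_name}"] = interface_id

def lookup_interface(site_id, interface_name):
    # Returns "" when the interface is unknown, mirroring the hunk's fallback.
    return interface_map.get(f"{site_id}_{interface_name}", "")

register_interface("1001", "LAN 01", "55")
register_interface("1002", "LAN 01", "77")  # same name, different site
assert lookup_interface("1002", "LAN 01") == "77"
assert lookup_interface("1003", "LAN 01") == ""
```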
```diff
@@ -106,8 +212,33 @@ def export_socket_site_to_json(args, configuration):
         return [{"success": True, "output_file": output_file, "account_id": account_id}]
 
     except Exception as e:
-… (old lines 109-110 not captured in this view)
+        # Get the current exception info
+        exc_type, exc_value, exc_traceback = sys.exc_info()
+
+        # Get the line number where the error occurred
+        line_number = exc_traceback.tb_lineno
+        filename = exc_traceback.tb_frame.f_code.co_filename
+        function_name = exc_traceback.tb_frame.f_code.co_name
+
+        # Get the full traceback as a string
+        full_traceback = traceback.format_exc()
+
+        # Create detailed error message
+        error_details = {
+            "error_type": exc_type.__name__,
+            "error_message": str(exc_value),
+            "line_number": line_number,
+            "function_name": function_name,
+            "filename": os.path.basename(filename),
+            "full_traceback": full_traceback
+        }
+
+        # Print detailed error information
+        print(f"ERROR: {exc_type.__name__}: {str(exc_value)}")
+        print(f"Location: {os.path.basename(filename)}:{line_number} in {function_name}()")
+        print(f"Full traceback:\n{full_traceback}")
+
+        return [{"success": False, "error": str(e), "error_details": error_details}]
 
 
 ##########################################################################
```
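The new handler reads `tb_lineno`/`tb_frame` from the first traceback object, i.e. the location inside `export_socket_site_to_json` where the exception surfaced. To report the frame that actually raised, one would walk `tb_next` to the end of the chain; a runnable sketch of that variant (not the package's code):

```python
import sys
import traceback

def risky():
    return 1 / 0

try:
    risky()
except Exception:
    exc_type, exc_value, exc_tb = sys.exc_info()
    # The first traceback object describes the frame containing the try block;
    # following tb_next reaches the frame that actually raised.
    tb = exc_tb
    while tb.tb_next is not None:
        tb = tb.tb_next
    print(f"{exc_type.__name__}: {exc_value} at "
          f"{tb.tb_frame.f_code.co_filename}:{tb.tb_lineno} in {tb.tb_frame.f_code.co_name}()")
    print(traceback.format_exc())
```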
```diff
@@ -135,16 +266,17 @@ def populateSiteLocationData(args, site_data, cur_site):
         print(f"Warning: Could not load site location data: {e}")
 
     ## siteLocation attributes
-    cur_site['…
-    cur_site['…
-    cur_site['…
-    cur_site['…
-    cur_site['…
+    cur_site['site_location'] = {}
+    cur_site['site_location']['address'] = site_data.get('infoSiteSnapshot', {}).get('address')
+    cur_site['site_location']['city'] = site_data.get('infoSiteSnapshot', {}).get('cityName')
+    cur_site['site_location']['stateName'] = site_data.get('infoSiteSnapshot', {}).get('countryStateName')
+    cur_site['site_location']['countryCode'] = site_data.get('infoSiteSnapshot', {}).get('countryCode')
+    cur_site['site_location']['countryName'] = site_data.get('infoSiteSnapshot', {}).get('countryName')
 
     # Look up timezone and state code from location data
-    country_name = cur_site['countryName']
-    state_name = cur_site['stateName']
-    city = cur_site['city']
+    country_name = cur_site['site_location']['countryName']
+    state_name = cur_site['site_location']['stateName']
+    city = cur_site['site_location']['city']
 
     # Create lookup key based on available data
     if state_name:
@@ -173,7 +305,7 @@ def populateSiteLocationData(args, site_data, cur_site):
 
     # Get timezone - always use the 0 element in the timezones array
     timezones = location_data.get('timezone', [])
-    cur_site['timezone'] = timezones[0] if timezones else None
+    cur_site['site_location']['timezone'] = timezones[0] if timezones else None
     return cur_site
 
 def getEntityLookup(args, configuration, account_id, entity_type):
```