catocli 2.1.2-py3-none-any.whl → 2.1.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
This release of catocli has been flagged as potentially problematic.
- catocli/Utils/clidriver.py +18 -18
- catocli/Utils/cliutils.py +165 -0
- catocli/Utils/csv_formatter.py +652 -0
- catocli/__init__.py +1 -1
- catocli/parsers/custom/export_rules/__init__.py +0 -4
- catocli/parsers/custom/export_sites/__init__.py +4 -3
- catocli/parsers/custom/export_sites/export_sites.py +198 -55
- catocli/parsers/custom/import_sites_to_tf/import_sites_to_tf.py +473 -393
- catocli/parsers/customParserApiClient.py +444 -38
- catocli/parsers/custom_private/__init__.py +19 -13
- catocli/parsers/mutation_accountManagement/__init__.py +21 -0
- catocli/parsers/mutation_accountManagement_disableAccount/README.md +15 -0
- catocli/parsers/mutation_admin/__init__.py +12 -0
- catocli/parsers/mutation_container/__init__.py +18 -0
- catocli/parsers/mutation_enterpriseDirectory/__init__.py +8 -0
- catocli/parsers/mutation_groups/__init__.py +6 -0
- catocli/parsers/mutation_hardware/__init__.py +2 -0
- catocli/parsers/mutation_policy/__init__.py +378 -0
- catocli/parsers/mutation_policy_antiMalwareFileHash_addRule/README.md +20 -0
- catocli/parsers/mutation_policy_antiMalwareFileHash_addSection/README.md +20 -0
- catocli/parsers/mutation_policy_antiMalwareFileHash_createPolicyRevision/README.md +20 -0
- catocli/parsers/mutation_policy_antiMalwareFileHash_discardPolicyRevision/README.md +20 -0
- catocli/parsers/mutation_policy_antiMalwareFileHash_moveRule/README.md +20 -0
- catocli/parsers/mutation_policy_antiMalwareFileHash_moveSection/README.md +20 -0
- catocli/parsers/mutation_policy_antiMalwareFileHash_publishPolicyRevision/README.md +20 -0
- catocli/parsers/mutation_policy_antiMalwareFileHash_removeRule/README.md +20 -0
- catocli/parsers/mutation_policy_antiMalwareFileHash_removeSection/README.md +20 -0
- catocli/parsers/mutation_policy_antiMalwareFileHash_updatePolicy/README.md +20 -0
- catocli/parsers/mutation_policy_antiMalwareFileHash_updateRule/README.md +20 -0
- catocli/parsers/mutation_policy_antiMalwareFileHash_updateSection/README.md +20 -0
- catocli/parsers/mutation_sandbox/__init__.py +4 -0
- catocli/parsers/mutation_site/__init__.py +72 -0
- catocli/parsers/mutation_sites/__init__.py +72 -0
- catocli/parsers/mutation_xdr/__init__.py +6 -0
- catocli/parsers/query_accountBySubdomain/__init__.py +2 -0
- catocli/parsers/query_accountManagement/__init__.py +2 -0
- catocli/parsers/query_accountMetrics/__init__.py +6 -0
- catocli/parsers/query_accountRoles/__init__.py +2 -0
- catocli/parsers/query_accountSnapshot/__init__.py +2 -0
- catocli/parsers/query_admin/__init__.py +2 -0
- catocli/parsers/query_admins/__init__.py +2 -0
- catocli/parsers/query_appStats/__init__.py +6 -0
- catocli/parsers/query_appStatsTimeSeries/README.md +3 -0
- catocli/parsers/query_appStatsTimeSeries/__init__.py +6 -0
- catocli/parsers/query_auditFeed/__init__.py +2 -0
- catocli/parsers/query_catalogs/__init__.py +2 -0
- catocli/parsers/query_container/__init__.py +2 -0
- catocli/parsers/query_devices/README.md +1 -1
- catocli/parsers/query_devices/__init__.py +2 -0
- catocli/parsers/query_enterpriseDirectory/__init__.py +2 -0
- catocli/parsers/query_entityLookup/__init__.py +2 -0
- catocli/parsers/query_events/__init__.py +2 -0
- catocli/parsers/query_eventsFeed/__init__.py +2 -0
- catocli/parsers/query_eventsTimeSeries/__init__.py +2 -0
- catocli/parsers/query_groups/__init__.py +6 -0
- catocli/parsers/query_hardware/README.md +1 -1
- catocli/parsers/query_hardware/__init__.py +2 -0
- catocli/parsers/query_hardwareManagement/__init__.py +2 -0
- catocli/parsers/query_licensing/__init__.py +2 -0
- catocli/parsers/query_policy/__init__.py +37 -0
- catocli/parsers/query_policy_antiMalwareFileHash_policy/README.md +19 -0
- catocli/parsers/query_popLocations/__init__.py +2 -0
- catocli/parsers/query_sandbox/__init__.py +2 -0
- catocli/parsers/query_servicePrincipalAdmin/__init__.py +2 -0
- catocli/parsers/query_site/__init__.py +33 -0
- catocli/parsers/query_siteLocation/__init__.py +2 -0
- catocli/parsers/query_site_siteGeneralDetails/README.md +19 -0
- catocli/parsers/query_socketPortMetrics/__init__.py +2 -0
- catocli/parsers/query_socketPortMetricsTimeSeries/__init__.py +6 -0
- catocli/parsers/query_subDomains/__init__.py +2 -0
- catocli/parsers/query_xdr/__init__.py +4 -0
- catocli/parsers/raw/__init__.py +3 -1
- {catocli-2.1.2.dist-info → catocli-2.1.4.dist-info}/METADATA +1 -1
- {catocli-2.1.2.dist-info → catocli-2.1.4.dist-info}/RECORD +98 -66
- models/mutation.accountManagement.disableAccount.json +545 -0
- models/mutation.policy.antiMalwareFileHash.addRule.json +2068 -0
- models/mutation.policy.antiMalwareFileHash.addSection.json +1350 -0
- models/mutation.policy.antiMalwareFileHash.createPolicyRevision.json +1822 -0
- models/mutation.policy.antiMalwareFileHash.discardPolicyRevision.json +1758 -0
- models/mutation.policy.antiMalwareFileHash.moveRule.json +1552 -0
- models/mutation.policy.antiMalwareFileHash.moveSection.json +1251 -0
- models/mutation.policy.antiMalwareFileHash.publishPolicyRevision.json +1813 -0
- models/mutation.policy.antiMalwareFileHash.removeRule.json +1204 -0
- models/mutation.policy.antiMalwareFileHash.removeSection.json +954 -0
- models/mutation.policy.antiMalwareFileHash.updatePolicy.json +1834 -0
- models/mutation.policy.antiMalwareFileHash.updateRule.json +1757 -0
- models/mutation.policy.antiMalwareFileHash.updateSection.json +1105 -0
- models/mutation.site.updateSiteGeneralDetails.json +3 -3
- models/mutation.sites.updateSiteGeneralDetails.json +3 -3
- models/query.devices.json +249 -2
- models/query.hardware.json +224 -0
- models/query.policy.antiMalwareFileHash.policy.json +1583 -0
- models/query.site.siteGeneralDetails.json +899 -0
- schema/catolib.py +52 -14
- {catocli-2.1.2.dist-info → catocli-2.1.4.dist-info}/WHEEL +0 -0
- {catocli-2.1.2.dist-info → catocli-2.1.4.dist-info}/entry_points.txt +0 -0
- {catocli-2.1.2.dist-info → catocli-2.1.4.dist-info}/licenses/LICENSE +0 -0
- {catocli-2.1.2.dist-info → catocli-2.1.4.dist-info}/top_level.txt +0 -0
catocli/parsers/custom/export_sites/export_sites.py

@@ -2,10 +2,43 @@ import os
 import json
 import traceback
 import sys
+import ipaddress
 from datetime import datetime
 from graphql_client.api.call_api import ApiClient, CallApi
 from graphql_client.api_client import ApiException
 from ..customLib import writeDataToFile, makeCall, getAccountID
+from ....Utils.cliutils import load_cli_settings
+
+def calculateLocalIp(subnet):
+    """
+    Calculate the first usable IP address from a subnet/CIDR notation.
+    Returns the network address + 1 (first host IP).
+
+    Args:
+        subnet (str): Subnet in CIDR notation (e.g., "192.168.1.0/24")
+
+    Returns:
+        str: First usable IP address, or None if invalid subnet
+    """
+    if not subnet or subnet == "":
+        return None
+
+    try:
+        # Parse the subnet
+        network = ipaddress.IPv4Network(subnet, strict=False)
+
+        # Get the first usable IP (network address + 1)
+        # For /31 and /32 networks, return the network address itself
+        if network.prefixlen >= 31:
+            return str(network.network_address)
+        else:
+            # Return network + 1 (first host address)
+            first_host = network.network_address + 1
+            return str(first_host)
+
+    except (ipaddress.AddressValueError, ipaddress.NetmaskValueError, ValueError) as e:
+        # Invalid subnet format
+        return None
 
 def export_socket_site_to_json(args, configuration):
     """
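
The new calculateLocalIp helper relies entirely on the standard-library ipaddress module. The short sketch below is illustrative only (not part of the package) and shows the values the helper is expected to produce for typical inputs.

    from ipaddress import IPv4Network

    def first_usable_ip(subnet):
        # Mirrors calculateLocalIp above: network address + 1, except for /31 and /32.
        net = IPv4Network(subnet, strict=False)
        return str(net.network_address if net.prefixlen >= 31 else net.network_address + 1)

    print(first_usable_ip("192.168.1.0/24"))  # -> 192.168.1.1
    print(first_usable_ip("10.0.0.0/31"))     # -> 10.0.0.0 (network address kept for /31 and /32)
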
@@ -20,10 +53,11 @@ def export_socket_site_to_json(args, configuration):
     }
 
     try:
-        settings
-
-
-
+        # Load CLI settings using the robust function
+        settings = load_cli_settings()
+        if not settings:
+            raise ValueError("Unable to load clisettings.json. Cannot proceed with export.")
+
         account_id = getAccountID(args, configuration)
         # Get account snapshot with siteIDs if provided
         # Get siteIDs from args if provided (comma-separated string)
@@ -38,6 +72,26 @@ def export_socket_site_to_json(args, configuration):
         ## Call APIs to retrieve sites, interface and network ranges ##
         ###############################################################
         snapshot_sites = getAccountSnapshot(args, configuration, account_id, site_ids)
+
+        # Check if no sites were found and handle gracefully
+        sites_list = snapshot_sites['data']['accountSnapshot']['sites']
+        if not sites_list or len(sites_list) == 0:
+            if site_ids:
+                # User provided specific site IDs but none were found
+                print(f"No sites found matching the provided site IDs: {', '.join(site_ids)}")
+                print("Please verify the site IDs are correct and that they exist in this account.")
+                return [{"success": False, "message": f"No sites found for the specified site IDs: {', '.join(site_ids)}", "sites_requested": site_ids}]
+            else:
+                # No site filter was provided but no sites exist at all
+                print("No sites found in this account.")
+                return [{"success": False, "message": "No sites found in account", "account_id": account_id}]
+
+        if hasattr(args, 'verbose') and args.verbose:
+            if site_ids:
+                print(f"Found {len(sites_list)} site(s) matching the provided site IDs")
+            else:
+                print(f"Found {len(sites_list)} site(s) in account")
+
         entity_network_interfaces = getEntityLookup(args, configuration, account_id, "networkInterface")
         entity_network_ranges = getEntityLookup(args, configuration, account_id, "siteRange")
         entity_sites = getEntityLookup(args, configuration, account_id, "site")
@@ -48,6 +102,7 @@ def export_socket_site_to_json(args, configuration):
         for snapshot_site in snapshot_sites['data']['accountSnapshot']['sites']:
             site_id = snapshot_site.get('id')
             connectionType = snapshot_site.get('infoSiteSnapshot', {}).get('connType', "")
+            # # Placeholder code to rename what the API returns if export should support cloud deployments
             # if connectionType=="VSOCKET_VGX_AWS":
             #     connectionType = "SOCKET_AWS1500"
             # elif connectionType=="VSOCKET_VGX_AZURE":
@@ -81,11 +136,14 @@ def export_socket_site_to_json(args, configuration):
                     cur_wan_interface['id'] = site_id+":"+ wan_ni.get('id', "")
                 else:
                     cur_wan_interface['id'] = site_id+":INT_"+ wan_ni.get('id', "")
+                cur_wan_interface['index'] = wan_ni.get('id', "")
                 cur_wan_interface['name'] = wan_ni.get('name', "")
                 cur_wan_interface['upstream_bandwidth'] = wan_ni.get('upstreamBandwidth', 0)
                 cur_wan_interface['downstream_bandwidth'] = wan_ni.get('downstreamBandwidth', 0)
                 cur_wan_interface['dest_type'] = wan_ni.get('destType', "")
                 cur_wan_interface['role'] = role
+                # Not supported via API to be populated later when available
+                cur_wan_interface['precedence'] = "ACTIVE"
                 cur_site['wan_interfaces'].append(cur_wan_interface)
 
             if site_id:
@@ -109,17 +167,21 @@ def export_socket_site_to_json(args, configuration):
                 lan_ni_subnet = str(lan_ni_helper_fields.get('subnet', ""))
                 ni_index = lan_ni_helper_fields.get('interfaceId', "")
                 ni_index = f"INT_{ni_index}" if isinstance(ni_index, (int, str)) and str(ni_index).isdigit() else ni_index
-                if cur_site_entry["connection_type"] in settings["default_socket_interface_map"] and ni_index in settings["default_socket_interface_map"][
+                if cur_site_entry["connection_type"] in settings["default_socket_interface_map"] and ni_index in settings["default_socket_interface_map"][cur_site_entry["connection_type"]]:
                     cur_native_range = cur_site_entry["native_range"]
                     cur_site_entry["native_range"]["interface_id"] = ni_interface_id
                     cur_site_entry["native_range"]["interface_name"] = ni_interface_name
                     cur_site_entry["native_range"]["subnet"] = lan_ni_subnet
                     cur_site_entry["native_range"]["index"] = ni_index
+                    # Add entry to lan interfaces for default_lan
+                    cur_site_entry['lan_interfaces'].append({"network_ranges": [],"default_lan":True})
                 else:
                     cur_lan_interface['id'] = ni_interface_id
                     cur_lan_interface['name'] = ni_interface_name
                     cur_lan_interface['index'] = ni_index
                     cur_lan_interface['dest_type'] = lan_ni_helper_fields.get('destType', "")
+                    # temporarily add subnet to interface to be used later to flas native range_range
+                    cur_lan_interface['subnet'] = lan_ni_subnet
                     cur_site_entry['lan_interfaces'].append(cur_lan_interface)
             else:
                 if hasattr(args, 'verbose') and args.verbose:
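
The hunk above introduces a placeholder LAN-interface entry flagged with default_lan; a later hunk looks that entry up again with next() before attaching network ranges to it. A minimal sketch of that handshake, using made-up data shaped like the exporter's intermediate structures shown in the diff:

    # Hypothetical data; only the keys visible in the diff are assumed.
    site_entry = {"lan_interfaces": []}

    # What the interface hunk appends when the NIC is the default socket interface:
    site_entry["lan_interfaces"].append({"network_ranges": [], "default_lan": True})

    # What the network-range hunk does later to find that same entry:
    default_lan = next(
        (nic for nic in site_entry["lan_interfaces"] if nic.get("default_lan")),
        None,
    )
    if default_lan is not None:
        default_lan["network_ranges"].append({"name": "Example range"})
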
@@ -135,50 +197,93 @@ def export_socket_site_to_json(args, configuration):
             nr_entity_data = range.get('entity', {})
             nr_interface_name = str(nr_helper_fields.get('interfaceName', ""))
             nr_site_id = str(nr_helper_fields.get('siteId', ""))
+            range_id = nr_entity_data.get('id', "")
+
             nr_site_entry = next((site for site in processed_data['sites'] if site['id'] == nr_site_id), None)
             if nr_site_entry:
-                nr_subnet = nr_helper_fields.get('subnet',
-                nr_vlan = nr_helper_fields.get('vlanTag',
+                nr_subnet = nr_helper_fields.get('subnet', None)
+                nr_vlan = nr_helper_fields.get('vlanTag', None)
                 nr_mdns_reflector = nr_helper_fields.get('mdnsReflector', False)
                 nr_dhcp_microsegmentation = nr_helper_fields.get('microsegmentation', False)
-
+                nr_interface_name = str(nr_helper_fields.get('interfaceName', ""))
+                range_name = nr_entity_data.get('name', nr_interface_name)
                 if range_name and " \\ " in range_name:
                     range_name = range_name.split(" \\ ").pop()
                 range_id = nr_entity_data.get('id', "")
-                nr_interface_name = str(nr_helper_fields.get('interfaceName', ""))
 
                 # the following fields are missing from the schema, populating blank fields in the interim
-                nr_dhcp_type = nr_helper_fields.get('XXXXX', "")
-                nr_ip_range = nr_helper_fields.get('XXXXX',
-                nr_relay_group_id = nr_helper_fields.get('XXXXX',
-
-
-                nr_translated_subnet = nr_helper_fields.get('XXXXX',
-                nr_internet_only = nr_helper_fields.get('XXXXX',
-                nr_local_ip = nr_helper_fields.get('XXXXX',
+                nr_dhcp_type = nr_helper_fields.get('XXXXX', "DHCP_DISABLED")
+                nr_ip_range = nr_helper_fields.get('XXXXX', None)
+                # nr_relay_group_id = nr_helper_fields.get('XXXXX', None)
+                nr_relay_group_name = nr_helper_fields.get('XXXXX', None)
+                nr_gateway = nr_helper_fields.get('XXXXX', None)
+                nr_translated_subnet = nr_helper_fields.get('XXXXX', None)
+                # nr_internet_only = nr_helper_fields.get('XXXXX', None)
+                nr_local_ip = nr_helper_fields.get('XXXXX', None)
+                nr_range_type = nr_helper_fields.get('XXXXX', None)
+                # Adding logic to pre-populate with default value
+                if nr_vlan!=None:
+                    nr_range_type="VLAN"
+                else:
+                    nr_range_type="Direct"
+
+                # Calculate local IP from subnet if --calculate-local-ip flag is set
+                if hasattr(args, 'calculate_local_ip') and args.calculate_local_ip and nr_subnet:
+                    calculated_ip = calculateLocalIp(nr_subnet)
+                    if calculated_ip:
+                        nr_local_ip = calculated_ip
+                        if hasattr(args, 'verbose') and args.verbose:
+                            print(f" Calculated local IP for subnet {nr_subnet}: {calculated_ip}")
 
                 site_native_range = nr_site_entry.get('native_range', {}) if nr_site_entry else {}
 
                 if site_native_range.get("interface_name", "") == nr_interface_name:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+                    if range_name!="Native Range":
+                        nr_lan_interface_entry = next((lan_nic for lan_nic in nr_site_entry["lan_interfaces"] if 'default_lan' in lan_nic and lan_nic['default_lan']), None)
+                        # print(f"checking range: {network_range_site_id} - {network_range_interface_name}")
+                        if nr_lan_interface_entry:
+                            cur_range = {}
+                            cur_range['id'] = range_id
+                            cur_range['name'] = range_name
+                            cur_range['subnet'] = nr_subnet
+                            cur_range['vlan'] = nr_vlan
+                            cur_range['mdns_reflector'] = nr_mdns_reflector
+                            ## The folliowing fields are missing from the schema, populating blank fields in the interim
+                            cur_range['gateway'] = nr_gateway
+                            cur_range['range_type'] = nr_range_type
+                            cur_range['translated_subnet'] = nr_translated_subnet
+                            # # Not available to set for native_range via API today
+                            # cur_range['internet_only'] = nr_internet_only
+                            cur_range['local_ip'] = nr_local_ip # Use the calculated or original value
+                            cur_range['dhcp_settings'] = {
+                                'dhcp_type': nr_dhcp_type,
+                                'ip_range': nr_ip_range,
+                                'relay_group_id': None,
+                                'relay_group_name': nr_relay_group_name,
+                                'dhcp_microsegmentation': nr_dhcp_microsegmentation
+                            }
+                            nr_lan_interface_entry["network_ranges"].append(cur_range)
+                    else:
+                        site_native_range['range_name'] = range_name
+                        site_native_range['range_id'] = range_id
+                        site_native_range['vlan'] = nr_vlan
+                        site_native_range['mdns_reflector'] = nr_mdns_reflector
+                        # site_native_range['dhcp_microsegmentation'] = nr_dhcp_microsegmentation
+                        site_native_range['gateway'] = nr_gateway
+                        site_native_range['range_type'] = nr_range_type
+                        site_native_range['translated_subnet'] = nr_translated_subnet
+                        # # Not available to set for native_range via API today
+                        # site_native_range['internet_only'] = nr_internet_only
+                        site_native_range['local_ip'] = nr_local_ip
+                        site_native_range['dhcp_settings'] = {
+                            'dhcp_type': nr_dhcp_type,
+                            'ip_range': nr_ip_range,
+                            'relay_group_id': None,
+                            'relay_group_name': nr_relay_group_name,
+                            'dhcp_microsegmentation': nr_dhcp_microsegmentation
+                        }
                 else:
-                    nr_lan_interface_entry = next((lan_nic for lan_nic in nr_site_entry["lan_interfaces"] if lan_nic['name'] == nr_interface_name), None)
-                    # print(f"checking range: {network_range_site_id} - {network_range_interface_name}")
+                    nr_lan_interface_entry = next((lan_nic for lan_nic in nr_site_entry["lan_interfaces"] if ('default_lan' not in lan_nic or not lan_nic['default_lan']) and lan_nic['name'] == nr_interface_name), None)
                     if nr_lan_interface_entry:
                         cur_range = {}
                         cur_range['id'] = range_id
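
For orientation, the per-range record assembled in this hunk ends up shaped roughly as below. The values are hypothetical; only the field names, taken from the cur_range assignments above, are assumed.

    example_range = {
        "id": "12345",                      # entity id
        "name": "Servers",                  # range name (suffix after the " \ " separator)
        "subnet": "10.1.2.0/24",
        "vlan": 20,                         # None for direct ranges
        "range_type": "VLAN",               # "VLAN" when a vlanTag is present, else "Direct"
        "mdns_reflector": False,
        "gateway": None,                    # not exposed by the schema yet
        "translated_subnet": None,          # not exposed by the schema yet
        "local_ip": "10.1.2.1",             # filled in when --calculate-local-ip is set
        "dhcp_settings": {
            "dhcp_type": "DHCP_DISABLED",
            "ip_range": None,
            "relay_group_id": None,
            "relay_group_name": None,
            "dhcp_microsegmentation": False,
        },
    }
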
@@ -187,30 +292,62 @@ def export_socket_site_to_json(args, configuration):
                         cur_range['vlan'] = nr_vlan
                         cur_range['mdns_reflector'] = nr_mdns_reflector
                         ## The folliowing fields are missing from the schema, populating blank fields in the interim
-                        cur_range['gateway'] =
-                        cur_range['range_type'] =
-                        cur_range['translated_subnet'] =
-
-                        cur_range['
+                        cur_range['gateway'] = nr_gateway
+                        cur_range['range_type'] = nr_range_type
+                        cur_range['translated_subnet'] = nr_translated_subnet
+                        # # Not available to set for native_range via API today
+                        # cur_range['internet_only'] = nr_internet_only
+                        cur_range['local_ip'] = nr_local_ip # Use the calculated or original value
                         cur_range['dhcp_settings'] = {
-                            'dhcp_type':
-                            'ip_range':
-                            'relay_group_id':
+                            'dhcp_type': nr_dhcp_type,
+                            'ip_range': nr_ip_range,
+                            'relay_group_id': None,
+                            'relay_group_name': nr_relay_group_name,
                             'dhcp_microsegmentation': nr_dhcp_microsegmentation
                         }
+                        # DEBUG
+                        # print(json.dumps(nr_lan_interface_entry,indent=4,sort_keys=True))
+                        # print("nr_subnet",nr_subnet)
+                        # print('nr_lan_interface_entry["subnet"]='+nr_lan_interface_entry["subnet"])
+                        # print(json.dumps(nr_lan_interface_entry,indent=4,sort_keys=True))
+                        if "subnet" in nr_lan_interface_entry and nr_subnet==nr_lan_interface_entry["subnet"]:
+                            cur_range['native_range'] = True
+                            del nr_lan_interface_entry["subnet"]
+
                         nr_lan_interface_entry["network_ranges"].append(cur_range)
                     else:
-
-
+                        if hasattr(args, 'verbose') and args.verbose:
+                            print(f"Skipping range {nr_entity_data.get('id', '')}: site_id {nr_site_id} and {nr_interface_name} not found in ")
             else:
                 if hasattr(args, 'verbose') and args.verbose:
                     print(f"Skipping range, site_id is unsupported for export {nr_site_id}")
 
-        # Handle
-
-
-
-
+        # Handle custom filename and timestamp
+        if hasattr(args, 'json_filename') and args.json_filename:
+            # User provided custom filename
+            base_filename = args.json_filename
+            # Remove .json extension if provided, we'll add it back
+            if base_filename.endswith('.json'):
+                base_filename = base_filename[:-5]
+
+            if hasattr(args, 'append_timestamp') and args.append_timestamp:
+                timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
+                filename_template = f"{base_filename}_{timestamp}.json"
+            else:
+                filename_template = f"{base_filename}.json"
+        else:
+            # Use default filename template
+            if hasattr(args, 'append_timestamp') and args.append_timestamp:
+                timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
+                filename_template = f"socket_sites_{{account_id}}_{timestamp}.json"
+            else:
+                filename_template = "socket_sites_{account_id}.json"
+
+        if hasattr(args, 'verbose') and args.verbose:
+            if hasattr(args, 'json_filename') and args.json_filename:
+                print(f"Using custom filename template: {filename_template}")
+            else:
+                print(f"Using default filename template: {filename_template}")
 
         # Write the processed data to file using the general-purpose function
         output_file = writeDataToFile(
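
The filename handling added at the end of this hunk reduces to a small decision table: optional custom base name, optional timestamp suffix. A quick sketch (illustrative only, mirroring the branches above, with a hypothetical helper name):

    from datetime import datetime

    def build_filename_template(json_filename=None, append_timestamp=False):
        # Mirrors the branches above: strip a trailing ".json", optionally append a timestamp.
        timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
        if json_filename:
            base = json_filename[:-5] if json_filename.endswith(".json") else json_filename
            return f"{base}_{timestamp}.json" if append_timestamp else f"{base}.json"
        if append_timestamp:
            return f"socket_sites_{{account_id}}_{timestamp}.json"
        return "socket_sites_{account_id}.json"

    # build_filename_template("mysites.json", True) -> "mysites_2025-01-01_12-00-00.json" (timestamp varies)
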
@@ -277,13 +414,16 @@ def populateSiteLocationData(args, site_data, cur_site):
         if hasattr(args, 'verbose') and args.verbose:
             print(f"Warning: Could not load site location data: {e}")
 
+    address = site_data.get('infoSiteSnapshot', {}).get('address')
+    city = site_data.get('infoSiteSnapshot', {}).get('cityName')
+
     ## siteLocation attributes
     cur_site['site_location'] = {}
-    cur_site['site_location']['address'] = site_data.get('infoSiteSnapshot', {}).get('address')
-    cur_site['site_location']['city'] = site_data.get('infoSiteSnapshot', {}).get('cityName')
     cur_site['site_location']['stateName'] = site_data.get('infoSiteSnapshot', {}).get('countryStateName')
     cur_site['site_location']['countryCode'] = site_data.get('infoSiteSnapshot', {}).get('countryCode')
     cur_site['site_location']['countryName'] = site_data.get('infoSiteSnapshot', {}).get('countryName')
+    cur_site['site_location']['address'] = address if address != "" else None
+    cur_site['site_location']['city'] = city if city != "" else None
 
     # Look up timezone and state code from location data
     country_name = cur_site['site_location']['countryName']
@@ -303,6 +443,9 @@ def populateSiteLocationData(args, site_data, cur_site):
     # Look up location details
     location_data = site_location_data.get(lookup_key, {})
 
+    # Now that location_data is defined, we can set stateCode
+    cur_site['site_location']['stateCode'] = location_data.get('stateCode', None)
+
     if hasattr(args, 'verbose') and args.verbose:
         if location_data:
             print(f" Found location data: {location_data}")
@@ -313,8 +456,7 @@ def populateSiteLocationData(args, site_data, cur_site):
             if similar_keys:
                 print(f" Similar keys found: {similar_keys}")
 
-
-
+
     # Get timezone - always use the 0 element in the timezones array
     timezones = location_data.get('timezone', [])
     cur_site['site_location']['timezone'] = timezones[0] if timezones else None
@@ -372,6 +514,7 @@ def getAccountSnapshot(args, configuration, account_id, site_ids=None):
         raise ValueError("Failed to retrieve snapshot data from API")
 
     if not response or 'sites' not in response['data']['accountSnapshot'] or response['data']['accountSnapshot']['sites'] is None:
-
+        # Instead of raising an exception, return an empty response structure
+        response['data']['accountSnapshot']['sites'] = []
 
     return response
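
With this change, getAccountSnapshot no longer raises when an account has no sites; callers receive an empty list and the graceful no-sites branch added earlier in export_socket_site_to_json takes over. A tiny sketch of that contract, using a made-up response:

    # Hypothetical response shape after the change when an account has no sites.
    response = {"data": {"accountSnapshot": {"sites": []}}}

    sites = response["data"]["accountSnapshot"]["sites"]
    if not sites:
        # Matches the no-sites handling added to export_socket_site_to_json.
        print("No sites found in this account.")
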